How to use tf2onnx - common examples

To help you get started, we’ve selected a few tf2onnx examples, based on popular ways it is used in public projects.

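To ground the converter internals shown below, here is a minimal end-to-end sketch of driving tf2onnx from Python. It assumes a recent tf2onnx release that ships the tf2onnx.convert.from_keras helper, so treat the model, input signature, and opset as illustrative values rather than a canonical recipe.

# Illustrative sketch only: convert a Keras model with tf2onnx's Python API.
# Assumes tf2onnx.convert.from_keras is available (recent tf2onnx releases);
# adjust the input signature, opset, and output path for your own model.
import tensorflow as tf
import tf2onnx

model = tf.keras.applications.MobileNetV2(weights=None)
spec = (tf.TensorSpec((None, 224, 224, 3), tf.float32, name="input"),)

onnx_model, _ = tf2onnx.convert.from_keras(model, input_signature=spec, opset=13)
with open("model.onnx", "wb") as f:
    f.write(onnx_model.SerializeToString())

For a SavedModel, the command-line entry point (python -m tf2onnx.convert --saved-model <dir> --output model.onnx) covers the same workflow without writing any Python.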

Example from onnx/tensorflow-onnx: tf2onnx/onnx_opset/nn.py
def add_padding(ctx, node, kernel_shape, strides, dilations=None, spatial=2):
    padding = node.get_attr("padding")
    if padding:
        if dilations is None:
            dilations = [1] * spatial * 2
        padding = padding.s.decode("utf-8")
        if padding == 'SAME':
            pads = [0] * spatial * 2
            input_shape = ctx.get_shape(node.input[0])
            output_shape = ctx.get_shape(node.output[0])
            # check if the input shape is valid
            if len(input_shape) != len(pads):
                logger.error("node %s input needs to be rank %d, is %d", node.name, len(pads), len(input_shape))
            # transpose shape to nchw
            if node.is_nhwc():
                input_shape = spatial_map(input_shape, constants.NHWC_TO_NCHW)
                output_shape = spatial_map(output_shape, constants.NHWC_TO_NCHW)
            # calculate pads
            if any(input_shape[i + 2] == -1 or output_shape[i + 2] == -1 for i in range(spatial)):
                logger.debug(
                    "node %s has unknown dim for pads calculation, fallback to auto_pad: "
                    "input_shape=%s, output_shape=%s",
                    node.name, input_shape, output_shape)
                node.set_attr("auto_pad", "SAME_UPPER")
            else:
                for i in range(spatial):
                    pad = (output_shape[i + 2] - 1) * strides[i] + dilations[i] * kernel_shape[i] - input_shape[i + 2]
                    pad = max(pad, 0)
                    pads[i] = pad // 2
                    pads[i + spatial] = pad - pad // 2
                node.set_attr("pads", pads)
Example from onnx/keras-onnx: keras2onnx/ktf2onnx/tf2onnx/onnx_opset/nn.py
def add_padding(ctx, node, kernel_shape, strides, dilations=None, spatial=2):
    padding = node.get_attr("padding")
    if padding:
        if dilations is None:
            dilations = [1] * spatial * 2
        padding = padding.s.decode("utf-8")
        if padding == 'SAME':
            pads = [0] * spatial * 2
            input_shape = ctx.get_shape(node.input[0])
            output_shape = ctx.get_shape(node.output[0])
            # check if the input shape is valid
            if len(input_shape) != len(pads):
                logger.error("node %s input needs to be rank %d, is %d", node.name, len(pads), len(input_shape))
            # transpose shape to nchw
            if node.is_nhwc():
                input_shape = spatial_map(input_shape, constants.NHWC_TO_NCHW)
                output_shape = spatial_map(output_shape, constants.NHWC_TO_NCHW)
            # calculate pads
            if any(input_shape[i + 2] == -1 or output_shape[i + 2] == -1 for i in range(spatial)):
                logger.debug(
                    "node %s has unknown dim for pads calculation, fallback to auto_pad: "
                    "input_shape=%s, output_shape=%s",
                    node.name, input_shape, output_shape)
                node.set_attr("auto_pad", "SAME_UPPER")
            else:
                for i in range(spatial):
                    pad = (output_shape[i + 2] - 1) * strides[i] + dilations[i] * kernel_shape[i] - input_shape[i + 2]
                    pad = max(pad, 0)
                    pads[i] = pad // 2
                    pads[i + spatial] = pad - pad // 2
                node.set_attr("pads", pads)

        elif padding == 'VALID':
Example from onnx/tensorflow-onnx: tests/test_graph.py
        def rewrite_test(g, ops):
            pattern = \
                OpTypePattern('Add', name='op', inputs=["*", "*"])
            ops = g.get_nodes()
            matcher = GraphMatcher(pattern)
            match_results = list(matcher.match_ops(ops))
            for match in match_results:
                op = match.get_op('op')
                op.type = "Mul"
            return ops

        with tf.Session() as sess:
            x = tf.placeholder(tf.float32, [2, 3], name="input1")
            x_ = tf.add(x, x)
            _ = tf.identity(x_, name="output")
            g = process_tf_graph(sess.graph, opset=self.config.opset, custom_rewriter=[rewrite_test])
            self.assertEqual(
                'digraph { input1 [op_type=Placeholder shape="[2, 3]"] Add [op_type=Mul] '
                'output [op_type=Identity] input1:0 -> Add input1:0 -> Add Add:0 -> output }',
                onnx_to_graphviz(g))
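
Here the custom_rewriter hook receives the graph produced by process_tf_graph together with its node list; the rewriter matches every Add node against an OpTypePattern and retypes it to Mul, which is exactly what the expected graphviz string asserts.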
Example from onnx/tensorflow-onnx: tests/test_internals.py
    def test_match_flipped(self):
        n1 = helper.make_node("Sub", ["i1", "i1"], ["n1:0"], name="n1")
        n2 = helper.make_node("Add", ["i2", "i2"], ["n2:0"], name="n2")
        n3 = helper.make_node("Mul", ["n1:0", "n2:0"], ["n3:0"], name="n3")

        graph_proto = helper.make_graph(
            nodes=[n1, n2, n3],
            name="test",
            inputs=[helper.make_tensor_value_info("i1", TensorProto.FLOAT, [2, 2]),
                    helper.make_tensor_value_info("i2", TensorProto.FLOAT, [2, 2])],
            outputs=[helper.make_tensor_value_info("n2:0", TensorProto.FLOAT, [2, 2])],
            initializer=[]
        )
        g = GraphUtil.create_graph_from_onnx_graph(graph_proto)
        pattern = OpTypePattern('Mul', inputs=[
            OpTypePattern('Add'),
            OpTypePattern('Sub')
        ])
        ops = g.get_nodes()
        matcher = GraphMatcher(pattern, allow_reorder=True)
        match_results = list(matcher.match_ops(ops))
        self.assertEqual(1, len(match_results))
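
Because the matcher is built with allow_reorder=True, the pattern's (Add, Sub) input order still matches the Mul node whose actual inputs are (Sub, Add), so exactly one "flipped" match is found.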
Example from onnx/tensorflow-onnx: tests/test_graph.py
"""Custom op test."""

        @tf_op("Print", onnx_op="Identity")
        class Print:
            @classmethod
            def version_1(cls, ctx, node, **kwargs):
                self.assertEqual(node.type, "Identity")
                node.domain = constants.TENSORFLOW_OPSET.domain
                del node.input[1:]
                return node

        with tf.Session() as sess:
            x = tf.placeholder(tf.float32, [2, 3], name="input1")
            x_ = tf.Print(x, [x], "hello")
            _ = tf.identity(x_, name="output")
            g = process_tf_graph(sess.graph,
                                 opset=self.config.opset,
                                 extra_opset=[constants.TENSORFLOW_OPSET])
            self.assertEqual(
                'digraph { input1 [op_type=Placeholder shape="[2, 3]"] Print [domain="ai.onnx.converters.tensorflow" '
                'op_type=Identity] output [op_type=Identity] input1:0 -> Print Print:0 -> output }',
                onnx_to_graphviz(g))
            self.assertEqual(g.opset, self.config.opset)
            self.assertEqual(g.extra_opset, [constants.TENSORFLOW_OPSET])
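
Passing extra_opset=[constants.TENSORFLOW_OPSET] and setting node.domain lets the Print op pass through under the custom ai.onnx.converters.tensorflow domain instead of a standard ONNX opset, as the expected graphviz string shows.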
Example from onnx/tensorflow-onnx: tests/test_graph.py
    def test_squeeze(self):
        with tf.Session() as sess:
            x1 = tf.placeholder(tf.float32, [2, 3], name="input1")
            x_ = tf.squeeze(x1)
            _ = tf.identity(x_, name="output")
            g = process_tf_graph(sess.graph, opset=self.config.opset)
            self.assertEqual(
                'digraph { input1 [op_type=Placeholder shape="[2, 3]"] Squeeze [op_type=Squeeze] '
                'output [op_type=Identity] input1:0 -> Squeeze Squeeze:0 -> output }',
                onnx_to_graphviz(g))
Example from onnx/tensorflow-onnx: tests/test_graph.py
    def test_randomnormal(self):
        with tf.Session() as sess:
            x_ = tf.random_normal([2, 3], name="rand")
            _ = tf.identity(x_, name="output")
            g = process_tf_graph(sess.graph, opset=self.config.opset)
            actual = onnx_to_graphviz(g)
            expected = 'digraph { RandomNormal__2 [op_type=RandomNormal shape="[2, 3]"] output [op_type=Identity] ' \
                       'RandomNormal__2:0 -> output }'
            self.assertEqual(expected, actual)
Example from onnx/tensorflow-onnx: tests/test_graph.py
    def test_randomuniform(self):
        with tf.Session() as sess:
            shape = tf.constant([2, 3], name="shape")
            x_ = tf.random_uniform(shape, name="rand")
            x_ = tf.identity(x_, name="output1")
            x_ = tf.identity(x_, name="output2")
            _ = tf.identity(x_, name="output")
            g = process_tf_graph(sess.graph, opset=self.config.opset)
            self.assertEqual(
                'digraph { RandomUniform__2 [op_type=RandomUniform shape="[2, 3]"] output1 [op_type=Identity] '
                'output2 [op_type=Identity] output [op_type=Identity] RandomUniform__2:0 -> output1 '
                'output1:0 -> output2 output2:0 -> output }',
                onnx_to_graphviz(g))
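
The shape constant feeding tf.random_uniform is folded away here: the converter emits a single RandomUniform node carrying shape="[2, 3]" as an attribute, with the chain of Identity nodes preserved behind it.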
Example from onnx/tensorflow-onnx: tests/test_internals.py
    def test_rewrite_subgraph(self):
        graph_proto = self.sample_net()
        g = GraphUtil.create_graph_from_onnx_graph(graph_proto)
        pattern = \
            OpTypePattern('Abs', name='output', inputs=[
                OpTypePattern('Add', name='input')
            ])
        ops = g.get_nodes()
        matcher = GraphMatcher(pattern)
        match_results = list(matcher.match_ops(ops))
        for match in match_results:
            input_node = match.get_op('input')
            output_node = match.get_op('output')
            op_name = utils.make_name("ReplacedOp")
            out_name = utils.port_name(op_name)
            new_node = g.make_node("Sub", inputs=input_node.input, outputs=[out_name], name=op_name)
            g.replace_all_inputs(ops, output_node.output[0], new_node.output[0])
            for n in set(match.get_nodes()):
                g.remove_node(n.name)
        g.topological_sort(ops)
        result = onnx_to_graphviz(g)
        expected = 'digraph { Placeholder__4 [op_type=Placeholder] n1 [op_type=Abs] ' \
                   'n3 [op_type=Abs] n2 [op_type=Abs] ReplacedOp__5 [op_type=Sub] ' \
                   'n6 [op_type=Identity] n5_graph_outputs_Identity__3 [op_type=Identity] ' \
                   'input -> n1 n1:0 -> n3 n1:0 -> n2 n2:0 -> ReplacedOp__5 n3:0 -> ReplacedOp__5 ' \