How to use the mlagents.trainers.barracuda module in mlagents

To help you get started, we’ve selected a few mlagents examples that show popular ways mlagents.trainers.barracuda is used in public projects.

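All of the snippets below come from the tensorflow_to_barracuda.py converter in ml-agents, which uses the barracuda module to turn a frozen TensorFlow .pb graph into a Barracuda .nn file. A minimal way to get started is to mirror the converter's own __main__ block (quoted further down); note that the tf2bc import path shown here is an assumption for code living outside the mlagents/trainers directory:

from mlagents.trainers import barracuda
from mlagents.trainers import tensorflow_to_barracuda as tf2bc  # assumed import path

# parse_args supplies source_file, target_file and trim_unused_by_output,
# exactly as in the converter's __main__ block quoted below
args = barracuda.parse_args(
    description="Convert Tensorflow model to Barracuda binary",
    source_extension=".pb",
    help="input Tensorflow serialized .pb file",
)
tf2bc.convert(args.source_file, args.target_file, args.trim_unused_by_output, args)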

github Unity-Technologies / marathon-envs / ml-agents / mlagents / trainers / tensorflow_to_barracuda.py
context.model_tensors["kernel_i"] = kernels[0]
    context.model_tensors["kernel_j"] = kernels[1]
    context.model_tensors["kernel_f"] = kernels[2]
    context.model_tensors["kernel_o"] = kernels[3]
    context.model_tensors["bias_i"] = biases[0]
    context.model_tensors["bias_j"] = biases[1]
    context.model_tensors["bias_f"] = biases[2] + forget_bias
    context.model_tensors["bias_o"] = biases[3]

    context.layer_ranks[state_c] = 2
    context.layer_ranks[state_h] = 2

    # lstm_value/strided_slice/stack => lstm_value
    lstm_name = next(i.name for i in nodes if i.name.startswith("lstm")).split("/")[0]

    new_layers = barracuda.lstm(
        lstm_name,
        input,
        state_c,
        state_h,
        "kernel_i",
        "kernel_j",
        "kernel_f",
        "kernel_o",
        "bias_i",
        "bias_j",
        "bias_f",
        "bias_o",
        new_state_c,
        new_state_h,
    )
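The kernels and biases passed to barracuda.lstm above are the four LSTM gate tensors (input, cell candidate, forget, output). In the TensorFlow graphs this converter handles they are stored as one fused kernel and one fused bias, so the slices are typically obtained with something like the following (a sketch with assumed variable names and the usual TF gate order i, j, f, o, not the converter's exact code):

import numpy as np

# the fused kernel/bias have 4 * num_units columns, one block per gate (assumed layout)
kernels = np.split(fused_kernel, 4, axis=-1)
biases = np.split(fused_bias, 4, axis=-1)
# the forget-gate bias then has forget_bias added to it, as in the snippet above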

github Unity-Technologies / marathon-envs / ml-agents / mlagents / trainers / tensorflow_to_barracuda.py
        def is_unconnected_identity(layer):
            if layer.class_name == "Activation" and layer.activation == 0:  # Identity
                assert len(layer.inputs) == 1
                if layer.inputs[0] not in all_layers and layer.name not in all_inputs:
                    return True
            return False

        return [l for l in layers if not is_unconnected_identity(l)]

    o_model.layers = cleanup_layers(o_model.layers)

    all_inputs = {i for l in o_model.layers for i in l.inputs}

    # Trim
    if trim_unused_by_output:
        o_model.layers = barracuda.trim(
            o_model.layers, trim_unused_by_output, args.verbose
        )

    # Create load layer for constants
    def dims_to_barracuda_shape(dims):
        shape = list(dims)
        while len(shape) < 4:
            shape = [1] + shape
        return shape

    const_tensors = [i for i in all_inputs if i in o_model.tensors]
    const_tensors += o_model.globals
    for x in const_tensors:
        shape = dims_to_barracuda_shape(get_tensor_dims(o_model.tensors[x]))
        o_l = Struct(
            type=255,  # Load
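dims_to_barracuda_shape simply left-pads a tensor's dimension list with 1s until it is rank 4, the shape the Load layers built here expect. For example:

dims_to_barracuda_shape([256])      # -> [1, 1, 1, 256]
dims_to_barracuda_shape([128, 64])  # -> [1, 1, 128, 64]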

github Unity-Technologies / ml-agents / ml-agents / mlagents / trainers / tensorflow_to_barracuda.py
import numpy as np
import struct  # convert between Python values and C structs
import tensorflow as tf
import re

# import barracuda
# from barracuda import Struct
from mlagents.trainers import barracuda
from mlagents.trainers.barracuda import Struct
from google.protobuf import descriptor
from google.protobuf.json_format import MessageToJson


if __name__ == "__main__":
    # Handle command line arguments
    args = barracuda.parse_args(
        description="Convert Tensorflow model to Barracuda binary",
        source_extension=".pb",
        help="input Tensorflow serialized .pb file",
    )
    # The following code can be used as an example of using the API from another module
    # convert() is the main entry point for the converter
    import tensorflow_to_barracuda as tf2bc

    tf2bc.convert(args.source_file, args.target_file, args.trim_unused_by_output, args)


# TODO: support more than 1 LSTM layer per model - prepend scope to names and inputs
# TODO: support different activation functions in LSTM
# TODO: strip output Identity node, instead patch upstream layer names
# TODO: use ScaleBias and Pow with alpha when input is constant Tensor
# TODO: support all data format types (currently only NHWC)
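Since parse_args is configured with a .pb source extension and fills in args.source_file, args.target_file and args.trim_unused_by_output, the script is meant to be run from the command line. A hypothetical invocation (the exact positional arguments and flags are an assumption inferred from the args attributes used in these snippets, not taken from the project's documentation):

# assumed CLI usage:
# python tensorflow_to_barracuda.py frozen_model.pb model.nn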

github Unity-Technologies / ml-agents / ml-agents / mlagents / trainers / tensorflow_to_barracuda.py
    # Sort model so that layer inputs are always ready upfront
    o_model.layers = barracuda.sort(
        o_model.layers, o_model.inputs, o_model.memories, args.verbose
    )
    o_model.layers = barracuda.fuse(o_model.layers, args.verbose)

    # Summary
    barracuda.summary(
        o_model,
        print_layer_links=args.print_layer_links or args.verbose,
        print_barracuda_json=args.print_barracuda_json or args.verbose,
        print_tensors=args.print_tensors or args.verbose,
    )

    # Write to file
    barracuda.write(o_model, target_file)
    print("DONE: wrote", target_file, "file.")

github Unity-Technologies / ml-agents / ml-agents / mlagents / trainers / tensorflow_to_barracuda.py
def sqr_diff(name, a, b):
    nn = barracuda.Build(name)
    d = nn.sub(a, b)
    nn.mul(d, d, out=name)
    return nn.layers
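sqr_diff shows the general pattern for composing small graphs with barracuda.Build: create a builder named after the output, chain element-wise ops, and return the accumulated layers. Reusing only the ops that appear above, a plain element-wise square would look like this (a sketch; the helper name sqr is ours, not the converter's):

def sqr(name, a):
    nn = barracuda.Build(name)
    nn.mul(a, a, out=name)  # a * a, written to the layer named `name`
    return nn.layers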

github Unity-Technologies / ml-agents / ml-agents / mlagents / trainers / tensorflow_to_barracuda.py
patch_data=lambda data:
        # fuse [gamma, beta, mean, var, epsilon] => [scale, bias]
        # TODO: double-check if epsilon is the last data argument and not the 1st?
        barracuda.fuse_batchnorm_weights(data[0], data[1], data[2], data[3], data[4])
        if len(data) == 5
        else
        # fuse [ONE, beta, mean, var, epsilon] => [scale, bias]
        # TODO: double-check if epsilon is the last data argument and not the 1st?
        barracuda.fuse_batchnorm_weights(
            np.ones(np.shape(data[0])), data[0], data[1], data[2], data[3]
        ),
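fuse_batchnorm_weights folds the five batch-norm tensors into a single scale and bias pair. The standard algebraic fusion it is assumed to implement (the exact argument order, including where epsilon goes, is the open question the TODOs above flag) is:

# scale = gamma / sqrt(var + epsilon)
# bias  = beta - mean * scale
# so that  y = scale * x + bias  reproduces  y = gamma * (x - mean) / sqrt(var + epsilon) + beta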

github StepNeverStop / RLs / mlagents / trainers / tensorflow_to_barracuda.py
        return False

    o_model.outputs = [l.name for l in o_model.layers if is_output_layer(l)]

    # Compress
    if compress_f16:
        o_model = barracuda.compress(o_model)

    # Sort model so that layer inputs are always ready upfront
    o_model.layers = barracuda.sort(
        o_model.layers, o_model.inputs, o_model.memories, args.verbose
    )
    o_model.layers = barracuda.fuse(o_model.layers, args.verbose)

    # Summary
    barracuda.summary(
        o_model,
        print_layer_links=args.print_layer_links or args.verbose,
        print_barracuda_json=args.print_barracuda_json or args.verbose,
        print_tensors=args.print_tensors or args.verbose,
    )

    # Write to file
    barracuda.write(o_model, target_file)
    print("DONE: wrote", target_file, "file.")

github Unity-Technologies / marathon-envs / ml-agents / mlagents / trainers / tensorflow_to_barracuda.py
    # Load Tensorflow model
    print("Converting %s to %s" % (source_file, target_file))
    f = open(source_file, "rb")
    i_model = tf.GraphDef()
    i_model.ParseFromString(f.read())

    if args.verbose:
        print("OP_TYPES:", {layer.op for layer in i_model.node})

    if args.print_source_json or args.verbose:
        for layer in i_model.node:
            if not layer.op == "Const":
                print("MODEL:", MessageToJson(layer) + ",")

    # Convert
    o_model = barracuda.Model()
    o_model.layers, o_input_shapes, o_model.tensors, o_model.memories, o_model.globals = process_model(
        i_model, args
    )

    # Cleanup unconnected Identities (they might linger after processing complex node patterns like LSTM)
    def cleanup_layers(layers):
        all_layers = {l.name for l in layers}
        all_inputs = {i for l in layers for i in l.inputs}

        def is_unconnected_identity(layer):
            if layer.class_name == "Activation" and layer.activation == 0:  # Identity
                assert len(layer.inputs) == 1
                if layer.inputs[0] not in all_layers and layer.name not in all_inputs:
                    return True
            return False