How to use netron - common examples

To help you get started, we’ve selected a few netron examples based on popular ways it is used in public projects. All of the snippets below come from the yankov/netron project on GitHub.


github yankov / netron / server.py (view on GitHub)
parser.add_argument('--params_sample_size', type=int, required=False, help="Only for RandomSearch: parameter sample size per network structure.")
parser.add_argument('--structure_sample_size', type=int, required=False, help="Only for RandomSearch: network structure sample size per given number of layers.")
parser.add_argument('--mongo_uri', required=False, default="mongodb://localhost:27017/", help="MongoDB connection string URI.")
parser.add_argument('--layers_num', required=False, help="Number of layers for neural networks.")
parser.add_argument('--max_evals', required=False, default=1000000, type=int, help="Max number of evaluations (models to train).")
parser.add_argument('--nb_epoch', required=False, default=10, type=int, help="Max number of epochs per job.")
parser.add_argument('--patience', required=False, default=5, type=int, help="Max number of epochs without improvement (EarlyStopper).")

args = parser.parse_args()

input_shape = [int(dim) for dim in args.input_shape.split(",")]

# TODO: clean up repetitive code
print("Starting a server with %s solver and %s dataset" % (args.solver, args.data))
if args.solver == "GridSearch":
    solver = GridSearch(args.grid, input_shape, args.output_dim, "keras", args.data)
    job_manager = JobManager(solver)
    server = JobHTTPServer(args.port, job_manager, args.mongo_uri)
    server.start()
elif args.solver == "RandomSearch":
    if not args.params_sample_size or not args.structure_sample_size:
        raise ValueError("--params_sample_size  and --structure_sample_size must be used with RandomSearch")
    solver = RandomSearch(args.grid, input_shape, args.output_dim, "keras", args.data, args.params_sample_size,
                          args.structure_sample_size)
    job_manager = JobManager(solver)
    server = JobHTTPServer(args.port, job_manager, args.mongo_uri)
    server.start()
elif args.solver == "HyperOpt":
    h = HyperOptSearch(input_shape=input_shape, output_dim=args.output_dim)
    h.start_search_server(args.mongo_uri, args.data, int(args.layers_num), args.max_evals, args.nb_epoch, args.patience)
else:
    raise ValueError("This solver is not supported. Only possible values for --solver right now are GridSearch or RandomSearch")
github yankov / netron / netron / worker / KerasModel.py (view on GitHub)
    def __init__(self, exp_id, nb_epoch=10, patience=5, mongo_uri="mongodb://localhost:27017/", data_filename=None):
        self.exp_id = exp_id
        self.model = None
        self.data = None
        self.data_loader = None

        # How many epochs to train (if early stopping doesn't kick in earlier)
        self.nb_epoch = nb_epoch

        # How many epochs without improvement before the early stopper kicks in
        self.patience = patience

        self.mongo_uri = mongo_uri

        self.factory = KerasModelFactory()

        self.data_loader = DataLoader("http://localhost:8080", self.mongo_uri)
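
A minimal usage sketch for the worker above, assuming the class is importable from the file location shown; the experiment id and keyword values are placeholders, not values from the project.

from netron.worker.KerasModel import KerasModel  # import path assumed from the file location above

# Hypothetical worker for a single experiment; "exp-001" is a made-up id.
worker = KerasModel(
    exp_id="exp-001",
    nb_epoch=20,                              # train each model for at most 20 epochs
    patience=3,                               # give up after 3 epochs without improvement
    mongo_uri="mongodb://localhost:27017/",   # same default the constructor uses
)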
github yankov / netron / netron / solvers / Solver.py (view on GitHub)
    def get_model_factory(self, model_type):
        if model_type == "keras":
            return KerasModelFactory()
        else:
            raise ValueError("%s is not supported. Only Keras models are supported right now." % model_type)
github yankov / netron / netron / server / JobHTTPServer.py (view on GitHub)
(r"/stats/(.*)", StatsHandler, {"mongo_uri": self.mongo_uri}),
            (r"/data/(.*)", tornado.web.StaticFileHandler, {'path': self.static_path})
            ],
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path= self.static_path)

        self.port = port

    def start(self):
        server = HTTPServer(self.routes)
        server.listen(self.port)
        IOLoop.current().start()

# Example
if __name__ == "__main__":
    job_manager = JobManager(solver = RandomSearch(simple_params_grid, 1, 1, 10, "keras", "mnist_train.npz"))
    #job_manager = JobManager(solver = GridSearch(simple_params_grid, 1, 1, "keras", "sin_data.npz"))
    server = JobHTTPServer(8080, job_manager)
    server.start()
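
With a server like the one in the example above listening on port 8080, the /stats/ route it registers can be queried over plain HTTP; the experiment id in the URL below is hypothetical, and the shape of the response depends on StatsHandler.

from urllib.request import urlopen

# Fetch stats for a (hypothetical) experiment id from the /stats/ handler.
with urlopen("http://localhost:8080/stats/exp-001") as response:
    print(response.read().decode("utf-8"))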
github yankov / netron / netron / solvers / RandomSearch.py (view on GitHub)
from netron.solvers import Solver
from netron.grid import NeuralNetGrid
from sklearn.grid_search import ParameterSampler, ParameterGrid  # in modern scikit-learn these live in sklearn.model_selection
import random
import itertools
import hashlib
import time

class RandomSearch(Solver):
    # If we sample more than this number of already seen networks
    # consecutively, then skip to the next network size
    STRUCT_DUP_THRESHOLD = 100

    def initialize(self, params_sample_size, structure_sample_size):
        self.params_sample_size = params_sample_size
        self.structure_sample_size = structure_sample_size
        self.seen_structures = set()

    def random_product(self, *args, **kwds):
        "Random selection from itertools.product(*args, **kwds)"
        pools = [tuple(pool) for pool in args] * kwds.get('repeat', 1)  # build a list so it can be repeated (a bare map() breaks on Python 3)
        return tuple(random.choice(pool) for pool in pools)

    def create_network_structures(self, layers, layers_num, input_shape):
        """Returns all combinations of given set of layers with given set of sizes"""
github yankov / netron / netron / solvers / GridSearch.py (view on GitHub)
from netron.solvers import Solver
from netron.grid import NeuralNetGrid
from sklearn.grid_search import ParameterGrid  # in modern scikit-learn this lives in sklearn.model_selection
import itertools

class GridSearch(Solver):

    def create_network_structures(self, layers, layers_num, input_shape):
        """Returns all combinations of given set of layers with given set of sizes"""
        for i in layers_num:
            for net_struct in itertools.product(layers, repeat=i):
                fixed_net_struct = self.model_factory.fix_or_skip(net_struct, input_shape)
                if fixed_net_struct:
                    yield fixed_net_struct
                else:
                    print "skipping invalid structure: %s" % "->".join(net_struct)
                    continue

    def generate_models(self, input_shape, output_dim):
        loss_type = self.grid.params_grid["loss"][0]
        for layers in self.create_network_structures(self.grid.params_grid["layers"], self.grid.params_grid["layer_nums"], input_shape):
            print "Current network: %s" % "->".join(layers)