How to use polyaxon.Modes in polyaxon

To help you get started, we’ve selected a few examples that show how polyaxon.Modes is used in public projects.
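
In these snippets, Modes is polyaxon's collection of mode constants (Modes.TRAIN, Modes.EVAL, Modes.PREDICT) together with helper predicates such as Modes.is_infer, used to tell graph-building and input-pipeline code which phase it is running in. Below is a minimal sketch of that usage, assuming a pre-1.0 polyaxon installation imported as plx as in the examples; the exact string values behind the constants depend on the library version.

import polyaxon as plx

# The mode constants that appear throughout the examples below.
for mode in (plx.Modes.TRAIN, plx.Modes.EVAL, plx.Modes.PREDICT):
    print(mode)

# Predicates such as Modes.is_infer let model functions branch on the
# current mode, as polyaxon/estimators/agents.py does further down.
print(plx.Modes.is_infer(plx.Modes.TRAIN))  # expected to print False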


Example from polyaxon/polyaxon: tests/test_libs/test_template_module.py (view on GitHub)

# A test method from a tf.test.TestCase-style class (test_session() and
# assertAllClose come from the base class); the module assumes
# `import functools`, `import tensorflow as tf`, and `import polyaxon as plx`.
def test_sharing(self):
    batch_size = 3
    in_size = 4
    inputs1 = tf.placeholder(tf.float32, shape=[batch_size, in_size])
    inputs2 = tf.placeholder(tf.float32, shape=[batch_size, in_size])

    def dummy_fn(mode, inputs, output_size):
        weight_shape = [inputs.get_shape().as_list()[-1], output_size]
        weight = tf.get_variable("w", shape=weight_shape, dtype=inputs.dtype)
        return tf.matmul(inputs, weight)

    # Wrap the build function in a FunctionModule created for Modes.TRAIN.
    build_fn = functools.partial(dummy_fn, output_size=10)
    model = plx.libs.FunctionModule(plx.Modes.TRAIN, build_fn)
    outputs1 = model(inputs1)
    outputs2 = model(inputs2)

    self.assertEqual(model.scope_name(), "dummy_fn")

    import numpy as np
    input_data = np.random.rand(batch_size, in_size)

    with self.test_session() as sess:
        sess.run(tf.global_variables_initializer())
        outputs1, outputs2 = sess.run(
            [outputs1, outputs2], feed_dict={inputs1: input_data, inputs2: input_data})
        self.assertAllClose(outputs1, outputs2)
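
Here plx.Modes.TRAIN is the mode under which the FunctionModule templates dummy_fn: calling the module on two different placeholders reuses the same weights, which is why scope_name() resolves to "dummy_fn" and the two outputs match for identical input data.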

Example from polyaxon/polyaxon: polyaxon/datasets/flowers17.py (view on GitHub)

    filenames = [train_filename, eval_filename, test_filename]
    files_exist = [tf.gfile.Exists(f) for f in filenames]
    if all(files_exist):
        print('Dataset files already exist. Exiting without re-creating them.')
        return

    if any(files_exist):
        print('Some Dataset files already exist but not all of them. Re-creating them.')
        delete_datasets('.', filenames)

    filesnames_by_classes = filenames_by_classes(dataset_dir, num_images, folds)

    with tf.python_io.TFRecordWriter(train_filename) as tfrecord_writer:
        with tf.Session('') as session:
            print('converting {} images.'.format(Modes.TRAIN))
            convert_images(
                session, tfrecord_writer, converter, filesnames_by_classes[Modes.TRAIN])

    with tf.python_io.TFRecordWriter(eval_filename) as tfrecord_writer:
        with tf.Session('') as session:
            print('converting {} images.'.format(Modes.EVAL))
            convert_images(
                session, tfrecord_writer, converter, filesnames_by_classes[Modes.EVAL])

    with tf.python_io.TFRecordWriter(test_filename) as tfrecord_writer:
        with tf.Session('') as session:
            print('converting test images.')
            convert_images(session, tfrecord_writer, converter,
                           filesnames_by_classes[Modes.PREDICT])
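
In this dataset-building helper the Modes constants key the per-split filename mapping and label the conversion messages; note that the held-out test split is written under Modes.PREDICT.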

Example from polyaxon/polyaxon: examples/conv_autoencoder.py (view on GitHub)

def create_experiment_json_fn(output_dir):
    """Creates an auto encoder on MNIST handwritten digits.

    Links:
        * [MNIST Dataset] http://yann.lecun.com/exdb/mnist/
    """
    dataset_dir = './data/mnist'
    mnist.prepare(dataset_dir)
    train_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.TRAIN)
    eval_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.EVAL)
    meta_data_file = mnist.MEAT_DATA_FILENAME_FORMAT.format(dataset_dir)

    config = {
        'name': 'conv_autoencoder_mnsit',
        'output_dir': output_dir,
        'eval_every_n_steps': 100,
        'train_steps_per_iteration': 1000,
        'run_config': {'save_checkpoints_steps': 1000},
        'train_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 64,  'num_epochs': 10,
                                'shuffle': True, 'dynamic_pad': False,
                                'params': {'data_files': train_data_file,
                                           'meta_data_file': meta_data_file},
                                'definition': {
                                    'image': [
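
This and the remaining experiment examples follow the same pattern: plx.Modes.TRAIN and plx.Modes.EVAL are interpolated into each dataset's RECORD_FILE_NAME_FORMAT to locate the per-mode TFRecord files that feed the train and eval input pipelines.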

Example from polyaxon/polyaxon: examples/autoencoder.py (view on GitHub)

def create_experiment_json_fn(output_dir):
    """Creates an auto encoder on MNIST handwritten digits.

    Links:
        * [MNIST Dataset] http://yann.lecun.com/exdb/mnist/
    """
    dataset_dir = './data/mnist'
    mnist.prepare(dataset_dir)
    train_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.TRAIN)
    eval_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.EVAL)
    meta_data_file = mnist.MEAT_DATA_FILENAME_FORMAT.format(dataset_dir)

    config = {
        'name': 'autoencoder_mnsit',
        'output_dir': output_dir,
        'eval_every_n_steps': 100,
        'train_steps_per_iteration': 100,
        'run_config': {'save_checkpoints_steps': 100},
        'train_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 64,  'num_epochs': 10,
                                'shuffle': True, 'dynamic_pad': False,
                                'params': {'data_files': train_data_file,
                                           'meta_data_file': meta_data_file},
                                'definition': {
                                    'image': [

Example from polyaxon/polyaxon: examples/conv_highway_mnist.py (view on GitHub)

def create_experiment_json_fn(output_dir):
    """Creates an experiment using cnn for MNIST dataset classification task."""

    dataset_dir = './data/mnist'
    mnist.prepare(dataset_dir)
    train_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.TRAIN)
    eval_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.EVAL)
    meta_data_file = mnist.MEAT_DATA_FILENAME_FORMAT.format(dataset_dir)

    config = {
        'name': 'conv_highway',
        'output_dir': output_dir,
        'eval_every_n_steps': 5,
        'run_config': {'save_checkpoints_steps': 100},
        'train_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 64, 'num_epochs': 5,
                                'shuffle': True, 'dynamic_pad': False,
                                'params': {'data_files': train_data_file,
                                           'meta_data_file': meta_data_file}},
        },
        'eval_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 32, 'num_epochs': 1,

Example from polyaxon/polyaxon: examples/vgg19.py (view on GitHub)

def create_experiment_json_fn(output_dir):
    """Creates an experiement using a VGG19 to Oxford's 17 Category Flower Dataset.

    References:
        * Very Deep Convolutional Networks for Large-Scale Image Recognition.
        K. Simonyan, A. Zisserman. arXiv technical report, 2014.

    Links:
        * http://arxiv.org/pdf/1409.1556
    """
    dataset_dir = './data/mnist'
    mnist.prepare(dataset_dir)
    train_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.TRAIN)
    eval_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.EVAL)
    meta_data_file = mnist.MEAT_DATA_FILENAME_FORMAT.format(dataset_dir)

    config = {
        'name': 'vgg19',
        'output_dir': output_dir,
        'eval_every_n_steps': 10,
        'train_steps_per_iteration': 100,
        'run_config': {'save_checkpoints_steps': 100},
        'train_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 64,  'num_epochs': 1,
                                'shuffle': True, 'dynamic_pad': False,
                                'params': {'data_files': train_data_file,
                                           'meta_data_file': meta_data_file}},
        },
        'eval_input_data_config': {

Example from polyaxon/polyaxon: examples/variational_autoencoder.py (view on GitHub)

def create_experiment_json_fn(output_dir):
    """Creates a variational auto encoder on MNIST handwritten digits.

    Links:
        * [MNIST Dataset] http://yann.lecun.com/exdb/mnist/
    """
    dataset_dir = './data/mnist'
    mnist.prepare(dataset_dir)
    train_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.TRAIN)
    eval_data_file = mnist.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.EVAL)
    meta_data_file = mnist.MEAT_DATA_FILENAME_FORMAT.format(dataset_dir)

    config = {
        'name': 'vae_mnsit',
        'output_dir': output_dir,
        'eval_every_n_steps': 100,
        'train_steps_per_iteration': 100,
        'run_config': {'save_checkpoints_steps': 100},
        'train_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 64,  'num_epochs': 5,
                                'shuffle': True, 'dynamic_pad': False,
                                'params': {'data_files': train_data_file,
                                           'meta_data_file': meta_data_file},
                                'definition': {
                                    'image': [

Example from polyaxon/polyaxon: polyaxon/estimators/agents.py (view on GitHub)

                    'discount_reward': tf.placeholder(dtype=tf.float32,
                                                      shape=(None,),
                                                      name='discount_reward'),
                    'done': tf.placeholder(dtype=tf.bool, shape=(None,), name='done'),

                    'max_reward': tf.placeholder(
                        dtype=tf.float32, shape=(), name='max_reward'),
                    'min_reward': tf.placeholder(
                        dtype=tf.float32, shape=(), name='min_reward'),
                    'avg_reward': tf.placeholder(
                        dtype=tf.float32, shape=(), name='avg_reward'),
                    'total_reward': tf.placeholder(
                        dtype=tf.float32, shape=(), name='total_reward'),
                }
            )
        if Modes.is_infer(mode):
            return features, None
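
Besides the constants, Modes also provides predicates such as Modes.is_infer(mode); the agents snippet above uses it to return the features with no labels when the estimator is building its inference graph.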

Example from polyaxon/polyaxon: examples/convnet_cifar10.py (view on GitHub)

def create_experiment_json_fn(output_dir):
    """Creates an experiment using cnn for CIFAR-10 dataset classification task.

    References:
        * Learning Multiple Layers of Features from Tiny Images, A. Krizhevsky, 2009.

    Links:
        * [CIFAR-10 Dataset](https://www.cs.toronto.edu/~kriz/cifar.html)
    """
    dataset_dir = './data/cifar10'
    cifar10.prepare(dataset_dir)
    train_data_file = cifar10.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.TRAIN)
    eval_data_file = cifar10.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.EVAL)
    meta_data_file = cifar10.MEAT_DATA_FILENAME_FORMAT.format(dataset_dir)

    config = {
        'name': 'convnet_cifar10',
        'output_dir': output_dir,
        'eval_every_n_steps': 100,
        'run_config': {'save_checkpoints_steps': 100},
        'train_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 64,
                                'shuffle': True, 'dynamic_pad': False,
                                'params': {'data_files': train_data_file,
                                           'meta_data_file': meta_data_file}},
        },
        'eval_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 32,
                                'shuffle': True, 'dynamic_pad': False,

Example from polyaxon/polyaxon: examples/alexnet_flowers17.py (view on GitHub)

def create_experiment_json_fn(output_dir):
    """Creates an experiment using Alexnet applied to Oxford's 17  Category Flower Dataset.

    References:
        * Alex Krizhevsky, Ilya Sutskever & Geoffrey E. Hinton. ImageNet Classification with
        Deep Convolutional Neural Networks. NIPS, 2012.
        * 17 Category Flower Dataset. Maria-Elena Nilsback and Andrew Zisserman.

    Links:
        * [AlexNet Paper](http://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks.pdf)
        * [Flower Dataset (17)](http://www.robots.ox.ac.uk/~vgg/data/flowers/17/)
    """
    dataset_dir = './data/flowers17'
    flowers17.prepare(dataset_dir)
    train_data_file = flowers17.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.TRAIN)
    eval_data_file = flowers17.RECORD_FILE_NAME_FORMAT.format(dataset_dir, plx.Modes.EVAL)
    meta_data_file = flowers17.MEAT_DATA_FILENAME_FORMAT.format(dataset_dir)

    config = {
        'name': 'alexnet_flowers17',
        'output_dir': output_dir,
        'eval_every_n_steps': 10,
        'train_steps_per_iteration': 100,
        'run_config': {'save_checkpoints_steps': 100},
        'train_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 64, 'num_epochs': 1,
                                'shuffle': True, 'dynamic_pad': False,
                                'params': {'data_files': train_data_file,
                                           'meta_data_file': meta_data_file}},
        },
        'eval_input_data_config': {
            'pipeline_config': {'module': 'TFRecordImagePipeline', 'batch_size': 32, 'num_epochs': 1,