How to use the tensorflowonspark.TFNode.export_saved_model function in tensorflowonspark

To help you get started, we’ve selected a few tensorflowonspark examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

GitHub: yahoo / TensorFlowOnSpark / examples / imagenet / inception / inception_export.py — view on GitHub (external)
#   /my-favorite-path/imagenet_train/model.ckpt-0,
    # extract global_step from it.
    global_step = ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1]
    print('Successfully loaded model from %s at step=%s.' %
          (ckpt.model_checkpoint_path, global_step))

    print("Exporting saved_model to: {}".format(FLAGS.export_dir))
    # exported signatures defined in code
    signatures = {
      tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: {
        'inputs': { 'jpegs': jpegs, 'labels': labels },
        'outputs': { 'top_5_acc': top_5_op },
        'method_name': tf.saved_model.signature_constants.PREDICT_METHOD_NAME
      }
    }
    TFNode.export_saved_model(sess,
                              FLAGS.export_dir,
                              tf.saved_model.tag_constants.SERVING,
                              signatures)
    print("Exported saved_model")
GitHub: yahoo / TensorFlowOnSpark / examples / utils / model_export.py — view on GitHub (external)
else:
      # assume JSON file
      with open(FLAGS.signatures) as f:
        signatures = json.load(f)

    # convert string input/output values with actual tensors from graph
    for name, sig in signatures.items():
      for k, v in sig['inputs'].items():
        tensor_name = v if v.endswith(':0') else v + ':0'
        sig['inputs'][k] = g.get_tensor_by_name(tensor_name)
      for k, v in sig['outputs'].items():
        tensor_name = v if v.endswith(':0') else v + ':0'
        sig['outputs'][k] = g.get_tensor_by_name(tensor_name)

    # export a saved model
    TFNode.export_saved_model(sess,
                              FLAGS.export_dir,
                              tf.saved_model.tag_constants.SERVING,
                              signatures)
GitHub: yahoo / TensorFlowOnSpark / examples / mnist / tf / mnist_dist_pipeline.py — view on GitHub (external)
logging.info("Exporting saved_model to: {}".format(args.export_dir))
    # exported signatures defined in code
    signatures = {
      tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: {
        'inputs': {'image': x},
        'outputs': {'prediction': prediction},
        'method_name': tf.saved_model.signature_constants.PREDICT_METHOD_NAME
      },
      'featurize': {
        'inputs': {'image': x},
        'outputs': {'features': hid},
        'method_name': 'featurize'
      }
    }
    TFNode.export_saved_model(sess,
                              args.export_dir,
                              tf.saved_model.tag_constants.SERVING,
                              signatures)
    logging.info("Exported saved_model")
GitHub: yahoo / TensorFlowOnSpark / examples / mnist / tf / mnist_dist.py — view on GitHub (external)
with tf.Session() as sess:
        ckpt = tf.train.get_checkpoint_state(model_dir)
        print("ckpt: {}".format(ckpt))
        assert ckpt, "Invalid model checkpoint path: {}".format(model_dir)
        saver.restore(sess, ckpt.model_checkpoint_path)

        print("Exporting saved_model to: {}".format(export_dir))
        # exported signatures defined in code
        signatures = {
          tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: {
            'inputs': { 'image': x },
            'outputs': { 'prediction': prediction },
            'method_name': tf.saved_model.signature_constants.PREDICT_METHOD_NAME
          }
        }
        TFNode.export_saved_model(sess,
                                  export_dir,
                                  tf.saved_model.tag_constants.SERVING,
                                  signatures)
        print("Exported saved_model")

    # WORKAROUND for https://github.com/tensorflow/tensorflow/issues/21745
    # wait for all other nodes to complete (via done files)
    done_dir = "{}/{}/done".format(ctx.absolute_path(args.model), args.mode)
    print("Writing done file to: {}".format(done_dir))
    tf.gfile.MakeDirs(done_dir)
    with tf.gfile.GFile("{}/{}".format(done_dir, ctx.task_index), 'w') as done_file:
      done_file.write("done")

    for i in range(60):
      if len(tf.gfile.ListDirectory(done_dir)) < len(ctx.cluster_spec['worker']):
        print("{} Waiting for other nodes {}".format(datetime.now().isoformat(), i))
GitHub: yahoo / TensorFlowOnSpark / examples / mnist / spark / mnist_dist_pipeline.py — view on GitHub (external)
if sv.is_chief and args.export_dir:
        print("{0} exporting saved_model to: {1}".format(datetime.now().isoformat(), args.export_dir))
        # exported signatures defined in code
        signatures = {
          tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: {
            'inputs': {'image': x},
            'outputs': {'prediction': prediction},
            'method_name': tf.saved_model.signature_constants.PREDICT_METHOD_NAME
          },
          'featurize': {
            'inputs': {'image': x},
            'outputs': {'features': hid},
            'method_name': 'featurize'
          }
        }
        TFNode.export_saved_model(sess,
                                  args.export_dir,
                                  tf.saved_model.tag_constants.SERVING,
                                  signatures)
      else:
        # non-chief workers should wait for chief
        while not sv.should_stop():
          print("Waiting for chief")
          time.sleep(5)

    # Ask for all the services to stop.
    print("{0} stopping supervisor".format(datetime.now().isoformat()))
    sv.stop()
GitHub: yahoo / TensorFlowOnSpark / tensorflowonspark / TFSparkNode.py — view on GitHub (external)
def export_saved_model(self, sess, export_dir, tag_set, signatures):
    """Convenience function to access ``TFNode.export_saved_model`` directly from this object instance.

    Pure delegation: forwards all arguments unchanged to the module-level
    ``TFNode.export_saved_model`` and discards its return value (presumably
    ``None`` — confirm against TFNode).

    Args:
      sess: TensorFlow session holding the variables to export.
      export_dir: destination directory for the saved_model.
      tag_set: saved_model tag(s), e.g. ``tf.saved_model.tag_constants.SERVING``.
      signatures: dict mapping signature names to their inputs/outputs/method_name.
    """
    TFNode.export_saved_model(sess, export_dir, tag_set, signatures)