How to use the pycorrector.rnn_attention.nlc_model.NLCModel class in pycorrector

To help you get started, we’ve selected a few pycorrector examples based on popular ways it is used in public projects.
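All three examples below construct NLCModel with the same positional arguments. As a quick orientation, here is a minimal sketch of that call; the numeric values are illustrative placeholders, not the project’s defaults:

from pycorrector.rnn_attention.nlc_model import NLCModel

# Positional argument order as used in the examples below;
# the values here are illustrative only.
model = NLCModel(
    40000,    # vocab_size: size of the vocabulary
    400,      # size: hidden units per RNN layer
    3,        # num_layers: stacked RNN layers
    5.0,      # max_gradient_norm: clip gradients to this norm
    128,      # batch_size
    0.001,    # learning_rate
    0.95,     # learning_rate_decay_factor
    0.15,     # dropout
    forward_only=True)  # True builds the inference-only graph (no backward pass)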


github shibing624 / pycorrector / pycorrector / rnn_attention / error_analysis.py View on Github
import os
import re

import tensorflow as tf

from pycorrector.rnn_attention.nlc_model import NLCModel

FLAGS = tf.app.flags.FLAGS  # hyperparameters are registered as flags at module level


def create_model(session, vocab_size, forward_only):
    model = NLCModel(
        vocab_size, FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
        FLAGS.learning_rate, FLAGS.learning_rate_decay_factor, FLAGS.dropout,
        forward_only=forward_only)
    # Collect all "best" checkpoints, skipping the .meta graph files.
    ckpt_paths = [f for f in os.listdir(FLAGS.train_dir)
                  if re.search(r"best\.ckpt-\d+", f) and not f.endswith("meta")]
    assert ckpt_paths, "no best.ckpt-* checkpoint found in %s" % FLAGS.train_dir
    # Restore from the checkpoint with the highest global step.
    ckpt_paths = sorted(ckpt_paths, key=lambda x: int(x.split("-")[-1]))
    ckpt_path = os.path.join(FLAGS.train_dir, ckpt_paths[-1])
    if tf.gfile.Exists(ckpt_path):
        print("Reading model parameters from %s" % ckpt_path)
        model.saver.restore(session, ckpt_path)
    else:
        raise ValueError("checkpoint file not found: %s" % ckpt_path)
    return model
github shibing624 / pycorrector / pycorrector / rnn_attention / train.py View on Github
import logging

import tensorflow as tf

from pycorrector.rnn_attention.nlc_model import NLCModel

FLAGS = tf.app.flags.FLAGS  # hyperparameters are registered as flags at module level


def create_model(session, vocab_size, forward_only):
    model = NLCModel(
        vocab_size, FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
        FLAGS.learning_rate, FLAGS.learning_rate_decay_factor, FLAGS.dropout,
        forward_only=forward_only, optimizer=FLAGS.optimizer)
    # Resume from the most recent checkpoint if one exists; otherwise train from scratch.
    checkpoint_file = tf.train.latest_checkpoint(FLAGS.train_dir)
    if checkpoint_file:
        logging.info("Reading model parameters from %s", checkpoint_file)
        model.saver.restore(session, checkpoint_file)
    else:
        logging.info("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
        logging.info("Num params: %d",
                     sum(v.get_shape().num_elements() for v in tf.trainable_variables()))
    return model
github shibing624 / pycorrector / pycorrector / rnn_attention / infer.py View on Github
import tensorflow as tf

from pycorrector.rnn_attention.nlc_model import NLCModel

FLAGS = tf.app.flags.FLAGS  # hyperparameters are registered as flags at module level


def create_model(session, vocab_size, forward_only):
    model = NLCModel(
        vocab_size, FLAGS.size, FLAGS.num_layers, FLAGS.max_gradient_norm, FLAGS.batch_size,
        FLAGS.learning_rate, FLAGS.learning_rate_decay_factor, FLAGS.dropout,
        forward_only=forward_only)
    # Load trained weights for inference; fall back to random initialization
    # only if no checkpoint is available.
    checkpoint_file = tf.train.latest_checkpoint(FLAGS.train_dir)
    print("checkpoint file", checkpoint_file)
    if checkpoint_file:
        print("Reading model parameters from %s" % checkpoint_file)
        model.saver.restore(session, checkpoint_file)
    else:
        print("Created model with fresh parameters.")
        session.run(tf.global_variables_initializer())
    return model
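Each create_model variant expects an already-open TensorFlow session. A minimal usage sketch in TF 1.x graph mode (the vocab_size value below is an assumption for illustration; in the project it comes from the vocabulary built during preprocessing):

import tensorflow as tf

with tf.Session() as sess:
    # forward_only=True builds the inference graph only, as in infer.py above.
    model = create_model(sess, vocab_size=40000, forward_only=True)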