How to use plac - 10 common examples

To help you get started, we've selected a few plac examples based on common ways the library is used in public projects.
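Before looking at the excerpts, here is a minimal, self-contained sketch (not taken from any of the projects below) of the core plac pattern: decorate main() with plac.annotations and hand it to plac.call, which builds an argparse parser from the function signature and invokes main with the parsed arguments.

import plac

@plac.annotations(
    name=("name to greet", 'positional'),
    shout=("print the greeting in uppercase", 'flag', 's'))
def main(name, shout=False):
    # plac turns `name` into a positional argument and `shout` into a -s flag
    greeting = 'hello %s' % name
    print(greeting.upper() if shout else greeting)

if __name__ == '__main__':
    plac.call(main)  # e.g. `python greet.py world -s` prints HELLO WORLD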


github micheles / plac / doc / test_ishelve_more.py
def test():
    with plac.Interpreter(ishelve.main) as i:
        i.check('.clear', 'cleared the shelve')
        i.check('a=1', 'setting a=1')
        i.check('a', '1')
        i.check('.delete=a', 'deleted a')
        i.check('a', 'a: not found')
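This test drives ishelve's annotated main through plac.Interpreter, whose check() method feeds one command line to the interpreter and asserts that the output matches the expected string. A hedged, self-contained sketch of the same pattern (echo_main and its single command are made up for illustration):

import plac

@plac.annotations(line='line to echo back')
def echo_main(line):
    # yielding makes the function usable both as a script and inside an Interpreter
    yield 'echo: %s' % line

def test_echo():
    with plac.Interpreter(echo_main) as i:
        i.check('hello', 'echo: hello')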
github micheles / plac / doc / test_plac.py
def check_help(name):
    sys.argv[0] = name + '.py'  # avoid issue with pytest
    plac_core._parser_registry.clear()  # makes different imports independent
    try:
        try:
            main = plac.import_main(name + '.py')
        except SyntaxError:
            if sys.version < '3':  # expected for Python 2.X
                return
            else:  # not expected for Python 3.X
                raise
        p = plac.parser_from(main)
        expected = fix_today(open(name + '.help').read()).strip()
        got = p.format_help().strip()
        assert got == expected, got
    finally:
        sys.argv[0] = sys_argv0
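check_help imports each documentation script with plac.import_main, builds its parser with plac.parser_from, and compares format_help() against a stored .help file. The same machinery can be used directly; a small illustrative sketch (the main below is hypothetical):

import plac

def main(infile, outfile='out.txt'):
    "Copy infile to outfile"

parser = plac.parser_from(main)
print(parser.format_help())  # the same text plac.call(main) would print for -h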
github micheles / plac / doc / test_plac.py
def parser_from(f, **kw):
    f.__annotations__ = kw
    return plac.parser_from(f)
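This test helper attaches annotations to f programmatically before building the parser, which is equivalent to writing Python 3 parameter annotations inline. For instance, the hypothetical function below yields the same kind of parser as parser_from(query_db, query=('a query string', 'positional')) would with the helper above:

import plac

def query_db(query: ('a query string', 'positional')):
    print('running', query)

plac.parser_from(query_db).print_help()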
github flaviovdf / pyksc / src / scripts / plot_members.py
        centroid = centroids[k]
        plot_series(centroid, centroid_plot_foldpath, 'centroid', True)
        
        members = X[y == k]
        n_samples = members.shape[0]
        sample_rows = np.arange(n_samples)
        np.random.shuffle(sample_rows)        
        
        members_to_plot = members[sample_rows[:10]]
        for i in xrange(members_to_plot.shape[0]):
            print(k, i)
            plot_series(members_to_plot[i], centroid_plot_foldpath, 'ex-%d' % i)
            
if __name__ == '__main__':
    sys.exit(plac.call(main))
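Here plac only dispatches the command line: plac.call(main) returns whatever main returns, so wrapping it in sys.exit() as above propagates that return value as the process exit code.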
github explosion / spaCy / examples / vectors_fast_text.py
        nr_row, nr_dim = header.split()
        nlp.vocab.reset_vectors(width=int(nr_dim))
        for line in file_:
            line = line.rstrip().decode("utf8")
            pieces = line.rsplit(" ", int(nr_dim))
            word = pieces[0]
            vector = numpy.asarray([float(v) for v in pieces[1:]], dtype="f")
            nlp.vocab.set_vector(word, vector)  # add the vectors to the vocab
    # test the vectors and similarity
    text = "class colspan"
    doc = nlp(text)
    print(text, doc[0].similarity(doc[1]))


if __name__ == "__main__":
    plac.call(main)
github micheles / plac / plac_runner.py
            i.interact(verbose=verbose)
        elif multiline:
            i.multiline(verbose=verbose)
        elif serve:
            i.start_server(serve)
    elif batch:
        run((fname,) + extra, 'execute', verbose)
    elif test:
        run((fname,) + extra, 'doctest', verbose)
        print('run %s plac test(s)' % (len(extra) + 1))
    else:
        baseparser.print_usage()
main.add_help = False

if __name__ == '__main__':
    plac.call(main)
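Note the line main.add_help = False above the entry point: plac reads several parser settings from attributes of the main function itself. A hedged minimal sketch of that attribute-based configuration (the toy main is illustrative):

import plac

@plac.annotations(verbose=('print more output', 'flag', 'v'))
def main(verbose=False):
    print('verbose' if verbose else 'quiet')

main.add_help = False  # suppress the automatic -h/--help option

if __name__ == '__main__':
    plac.call(main)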
github geovedi / buangan-riset / scripts / word-alignment-phrase-extractor.py
    max_ngram=plac.Annotation("Max N-gram length", 'option', 'm', int)
)
def main(input_file, alignment_file, output_file, max_ngram=10):
    assert input_file and alignment_file and output_file, 'missing arguments'
    with io.open(output_file, 'w', encoding='utf-8') as out, \
        io.open(input_file, 'r', encoding='utf-8') as input_f, \
        io.open(alignment_file, 'r', encoding='utf-8') as alignment_f:
        for pair, alignment in izip(input_f, alignment_f):
            source, target = pair.split(' ||| ')

            for a, b in phrase_extraction(source, target, alignment):
                a, b = whitespace_tokenizer(a), whitespace_tokenizer(b)
                if 1 <= len(a) <= max_ngram and 1 <= len(b) <= max_ngram:
                    out.write('{0} ||| {1}\n'.format(' '.join(a), ' '.join(b)))

    logging.info((output_file))
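plac.Annotation spells the metadata out explicitly; its positional fields are (help, kind, abbrev, type, choices, metavar), so the declaration above produces an integer option abbreviated -m. A small self-contained sketch of the same option (the toy main is illustrative):

import plac

@plac.annotations(
    max_ngram=plac.Annotation("Max N-gram length", 'option', 'm', int))
def main(max_ngram=10):
    print('extracting phrases up to %d-grams' % max_ngram)

if __name__ == '__main__':
    plac.call(main)  # e.g. `python extract.py -m 5`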
github davidenunes / exp / exp / grid / cli.py
        job_wd=Annotation("working dir for job", 'option'),
    )
    def runs(self, script, params=None, group=1, grid="mas", jobname="job", job_cwd=False, job_wd=None):
        if not os.path.exists(params) and not os.path.isfile(script):
            print("Parameter space file not found: {path}".format(path=params), file=sys.stderr)
            sys.exit(1)

        ps = ParamSpace(filename=params)
        ps.write_grid_summary(jobname + '_params.csv')

        grid_cfg = DEFAULT_CONFIGS[grid]
        param_grid = ps.param_grid(include_id=True, id_param="id")

        job_files = write_job_files(grid_cfg, script, jobname, param_grid, group, jobname, job_cwd, job_wd)

        for job_file in job_files:
            try:
github micheles / plac / doc / picalculator.py
    def submit_tasks(self):
        npoints = math.ceil(self.npoints / self.n_cpu)
        self.i = plac.Interpreter(self).__enter__()
        return [self.i.submit('calc_pi %d' % npoints)
                for _ in range(self.n_cpu)]

plac

The smartest command line arguments parser in the world

License: BSD-2-Clause