How to use the ergo.core.queue.TaskQueue class in ergo

To help you get started, we’ve selected a few ergo examples, based on popular ways it is used in public projects.
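
From these examples, the basic pattern is: create a TaskQueue with a name and a number of workers, schedule work with add_task(fn, *args), and call join() to block until every queued task has been processed. Below is a minimal, self-contained sketch of that pattern; the work function and its arguments are hypothetical, and only the TaskQueue calls mirror the examples that follow:

from ergo.core.queue import TaskQueue

def work(item):
    # hypothetical task body; in ergo the real tasks run model inference or encoding
    print(item * item)

queue = TaskQueue('demo', num_workers=4, blocking=True)
for i in range(10):
    queue.add_task(work, i)  # enqueue one call to work(i) for the worker pool
queue.join()                 # block until all queued tasks have completed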

github evilsocket / ergo / ergo / actions / relevance.py (view on GitHub)
log.error("error while loading project: %s", err)
        quit()
    elif not prj.is_trained():
        log.error("no trained Keras model found for this project")
        quit()

    prj.prepare(args.dataset, 0.0, 0.0)

    # args.workers == 0 means a single worker in blocking mode, i.e. serial execution
    if args.workers == 0:
        args.workers = 1

    X, y         = prj.dataset.subsample(args.ratio)
    nrows, ncols = X.shape if prj.dataset.is_flat else (X[0].shape[0], len(X))
    attributes   = get_attributes(args.attributes, ncols)
    queue        = TaskQueue('relevance', num_workers=args.workers, blocking=True)

    if args.workers == 1:
        log.info("computing relevance of %d attributes on %d samples using '%s' metric (slow mode) ...", ncols, nrows, args.metric)
    else:
        log.info("computing relevance of %d attributes on %d samples using '%s' metric (parallel with %d workers) ...", ncols, nrows, args.metric, queue.num_workers)

    # measure the baseline accuracy and estimate evaluation speed in samples/s
    start = time.time()
    ref_accu, ref_cm = prj.accuracy_for(X, y, repo_as_dict = True)
    speed = (1.0 / (time.time() - start)) * nrows

    # schedule one relevance task per column
    for col in range(0, ncols):
        queue.add_task( run_inference_without, 
                X, y, col, prj.dataset.is_flat, 
                ref_accu['weighted avg'][args.metric], 
                args.metric)
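
The fragment above ends once the per-column tasks have been scheduled; as in the encode.py example below, the remaining step would be to drain the queue, roughly:

    # wait for every per-column task to finish (sketch; the result-collection
    # code of relevance.py is not shown in this fragment)
    queue.join()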

github evilsocket / ergo / ergo / actions / encode.py (view on GitHub)
    elif args.multi:
        log.info("parsing multiple inputs from %s ...", args.path)
        label = label_of(args, args.path)
        with open(args.path, 'rt') as fp:
            for line in fp:
                inputs.append((label, line))

    else:
        label = label_of(args, args.path)
        inputs.append((label, args.path))

    # one encoding queue whose tasks push their results to a second queue;
    # a single writer process drains that queue and appends to the output file
    num_in = len(inputs)
    enc_q = TaskQueue('encoding', args.workers)
    res_q = multiprocessing.Queue()
    app_p = multiprocessing.Process(target=appender, args=(args.output, num_in, res_q))

    # open the output file and start waiting for lines to append
    app_p.start()

    log.info("encoding %d inputs to %s ...", num_in, args.output)
    for (y, x) in inputs:
        enc_q.add_task(parse_input, prj, x, y, res_q, args.delete)

    # wait for all inputs to be encoded
    enc_q.join()
    # let the writer know there are no more inputs to read
    res_q.put(None)
    # wait for the writer to finish
    app_p.join()
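
The appender function itself is not part of this fragment. Below is a minimal sketch of a compatible writer, assuming each encoding task pushes one encoded line to res_q and that the final res_q.put(None) acts as a stop sentinel; the actual implementation in ergo may differ:

def appender(output_path, num_in, res_q):
    # hypothetical writer process: append each encoded line to the output
    # file until the None sentinel (res_q.put(None)) arrives
    written = 0
    with open(output_path, 'wt') as fp:
        while True:
            line = res_q.get()
            if line is None:
                break  # sentinel: all inputs have been encoded
            fp.write(line.rstrip('\n') + '\n')
            written += 1
    # written should equal num_in unless some inputs failed to encode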