How to use the toolz.partial function in toolz

To help you get started, we’ve selected a few toolz.partial examples based on popular ways it is used in public projects.

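toolz re-exports Python's functools.partial, so partial(f, *args, **kwargs) returns a new callable with those arguments pre-bound and any remaining parameters still free. A minimal sketch (the function and values are purely illustrative):

import operator
from toolz import partial

# Pre-bind the first positional argument of operator.add.
add_five = partial(operator.add, 5)
print(add_five(10))  # 15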

github Ambrosys/glyph/examples/control/minimal_example.py
def main2():
    """Mit modul application (aber ohne deap)."""

    mate = application.MateFactory.create(dict(mating='cxonepoint', mating_max_height=20), Individual)
    mutate = application.MutateFactory.create(dict(mutation='mutuniform', mutation_max_height=20), Individual)
    select = application.SelectFactory.create(dict(select='nsga2'))
    create = application.CreateFactory.create(dict(create_method='halfandhalf', create_min_height=1, create_max_height=4), Individual)
    algorithm_config = dict(algorithm='nsga2', crossover_prob=0.5,  mutation_prob=0.2, tournament_size=2)
    algorithm_factory = partial(application.AlgorithmFactory.create, algorithm_config, mate, mutate, select, create)
    runner = application.GPRunner(Individual, algorithm_factory, update_fitness)

    runner.init(pop_size=pop_size)
    for gen in range(10):
        runner.step()
        print(runner.logbook.stream)
    for individual in runner.pareto_front:
        print(individual)
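In main2, partial pre-binds the algorithm configuration and the variation operators to application.AlgorithmFactory.create, so the runner can build the algorithm later without carrying those arguments around. A sketch of the same deferred-factory pattern with stand-in names (create_algorithm and its arguments are illustrative, not glyph's API):

from toolz import partial

def create_algorithm(config, mate, mutate, select):
    # Stand-in for a factory that wires configuration and operators together.
    return {'config': config, 'mate': mate, 'mutate': mutate, 'select': select}

config = dict(algorithm='nsga2', crossover_prob=0.5, mutation_prob=0.2)
algorithm_factory = partial(create_algorithm, config, 'cxOnePoint', 'mutUniform', 'nsga2')

# Whoever receives the factory can now construct the algorithm with no arguments.
algorithm = algorithm_factory()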
github microscopium/microscopium/microscopium/main.py
"""Run image feature computation.

    Parameters
    ----------
    args : argparse.Namespace
        The arguments parsed by the argparse library.
    """
    if args.global_threshold:
        images = map(io.imread, args.images)
        thresholds = pre.global_threshold(images, args.random_seed)
    else:
        thresholds = None
    images = map(io.imread, args.images)
    screen_info = screens.d[args.screen]
    index_function, fmap = screen_info['index'], screen_info['fmap']
    fmap = tz.partial(fmap, threshold=thresholds,
                            sample_size=args.sample_size,
                            random_seed=args.random_seed)
    indices = list(map(index_function, args.images))
    f0, feature_names = fmap(next(images))
    feature_vectors = tz.cons(f0, (fmap(im)[0] for im in images))
    online_scaler = StandardScaler()
    online_pca = cluster.OnlineIncrementalPCA(n_components=args.n_components,
                                              batch_size=args.pca_batch_size)
    nimages, nfeatures = len(args.images), len(f0)
    emit = io.emitter_function(args.emitter)
    with temporary_hdf5_dataset((nimages, nfeatures), 'float') as dset:
        # First pass: compute the features, compute the mean and SD,
        # compute the PCA
        for i, (idx, v) in enumerate(zip(indices, feature_vectors)):
            emit({'_id': idx, 'feature_vector': list(v)})
            dset[i] = v
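Here tz.partial fixes the keyword arguments of the screen's feature map (threshold, sample_size, random_seed) so the resulting fmap takes a single image, both for the first image and inside the generator expression. A minimal sketch of the same keyword pre-binding, with an illustrative feature function standing in for the real fmap:

from toolz import partial

def feature_map(image, threshold=None, sample_size=None, random_seed=None):
    # Illustrative stand-in returning one feature value and its name.
    return [float(len(image))], ['n_pixels']

fmap = partial(feature_map, threshold=None, sample_size=100, random_seed=0)
features, names = fmap([0.1, 0.2, 0.3])  # only the image is still required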
github Ambrosys/glyph/examples/control/minimal_example.py
def main1():
    """Komplett ohne modul application."""
    import deap

    mate = deap.gp.cxOnePoint
    expr_mut = partial(deap.gp.genFull, min_=0, max_=2)
    mutate = partial(deap.gp.mutUniform, expr=expr_mut, pset=Individual.pset)
    algorithm = gp.NSGA2(mate, mutate)

    population = Individual.create_population(pop_size)
    update_fitness(population)
    for gen in range(10):
        population = algorithm.evolve(population)
        update_fitness(population)
        print('generation:', gen)
    print('Solutions:', population)
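main1 nests partials: expr_mut is itself a partially applied tree generator, and it is then bound as the expr keyword of the mutation operator. A sketch of the same nesting with stand-ins for deap.gp.genFull and deap.gp.mutUniform (illustrative only):

from toolz import partial

def gen_full(pset, min_, max_):
    # Stand-in for a tree generator parameterised by depth bounds.
    return f'tree(depth {min_}..{max_})'

def mut_uniform(individual, expr, pset):
    # Stand-in mutation that splices in a freshly generated subtree.
    return individual + ' | ' + expr(pset=pset)

expr_mut = partial(gen_full, min_=0, max_=2)
mutate = partial(mut_uniform, expr=expr_mut, pset='PSET')
print(mutate('x0 + x1'))  # x0 + x1 | tree(depth 0..2)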
github Ambrosys/glyph/examples/control/minimal_example.py
pset = gp.sympy_primitive_set(categories=['algebraic', 'trigonometric', 'exponential'],
                              arguments=['y0', 'y1'])
Individual = gp.Individual(pset=pset, name="Individual")


def phenotype(individual):
    """Produce phenotype from Individual."""
    return gp.sympy_phenotype(individual)

# Setup dynamic system.
x = np.linspace(0.0, 2.0 * np.pi, 2000, dtype=np.float64)
dynsys = partial(control_problem.anharmonic_oscillator, omega=1.0, c=3.0 / 8.0, k=0.0)
# Define target of control.
target = np.sin(x)
# Define measure.
trajectory = compose(partial(control_problem.integrate, yinit=[1.0, 0.0], x=x), dynsys, phenotype)
rmse = partial(utils.numeric.rmse, target)
dynsys_measure = assessment.measure(rmse, pre=compose(lambda arr: arr[0], trajectory))
complete_measure = assessment.measure(dynsys_measure, len, post=assessment.replace_nan)


def update_fitness(population):
    invalid = [p for p in population if not p.fitness.valid]
    fitnesses = map(complete_measure, invalid)
    for ind, fit in zip(invalid, fitnesses):
        ind.fitness.values = fit
    return len(invalid)


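At module level the example also chains partial with compose: trajectory first maps an individual to its phenotype, wraps it in the dynamic system, and finally integrates with pre-bound initial conditions and time grid, with compose applying the functions right-to-left. A sketch of the same pattern using toolz.compose and simple stand-in functions (the real integrate, dynsys and phenotype live in glyph's control example):

from toolz import compose, partial

def phenotype(individual):
    return individual * 2          # stand-in for gp.sympy_phenotype

def dynsys(expr, omega=1.0):
    return expr + omega            # stand-in for the controlled system

def integrate(system, yinit=0.0):
    return system + yinit          # stand-in for control_problem.integrate

# compose applies right-to-left: phenotype, then dynsys, then the
# pre-configured integrate.
trajectory = compose(partial(integrate, yinit=1.0), dynsys, phenotype)
print(trajectory(3))  # integrate(dynsys(phenotype(3)), yinit=1.0) == 8.0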
github Ambrosys/glyph/glyph/application.py
    log_level = utils.logging.log_level(args.verbosity)
    utils.logging.load_config(
        config_file=args.logging_config, level=log_level, placeholders=dict(workdir=workdir)
    )

    if args.resume_file is not None:
        logger.debug("Loading checkpoint {}".format(args.resume_file))
        app = Application.from_checkpoint(args.resume_file)
        return app, args
    else:
        mate = MateFactory.create(args, IndividualClass)
        mutate = MutateFactory.create(args, IndividualClass)
        select = SelectFactory.create(args)
        create_method = CreateFactory.create(args, IndividualClass)
        algorithm_factory = toolz.partial(AlgorithmFactory.create, args, mate, mutate, select, create_method)
        parallel_factory = toolz.partial(ParallelizationFactory.create, args)
        assessment_runner = AssessmentRunnerClass(parallel_factory)
        gp_runner = GPRunner(IndividualClass, algorithm_factory, assessment_runner)
        app = Application(args, gp_runner, args.checkpoint_file, callbacks=callbacks)
        return app, args
github bmabey/provenance/provenance/repos.py
def _filename(self, id):
    return cs.chained_filename(self, id)


### ArtifactSet logic


def _set_op(operator, *sets, labels=None):
    new_ids = t.reduce(operator, t.map(lambda s: s.artifact_ids, sets))
    return ArtifactSet(new_ids, labels)


set_union = t.partial(_set_op, ops.or_)
set_difference = t.partial(_set_op, ops.sub)
set_intersection = t.partial(_set_op, ops.and_)

artifact_set_properties = ['id', 'artifact_ids', 'created_at', 'labels']


class ArtifactSet(namedtuple('ArtifactSet', artifact_set_properties)):

    def __new__(cls, artifact_ids, labels=None, created_at=None, id=None):
        artifact_ids = t.map(_artifact_id, artifact_ids)
        labels = _check_labels_name(labels)
        ids = frozenset(artifact_ids)
        if id:
            set_id = id
        else:
            set_id = hash(ids)
        created_at = created_at if created_at else datetime.utcnow()
        return super(ArtifactSet, cls).__new__(cls, set_id, ids, created_at, labels)
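The three set operations differ only in the binary operator they reduce with, so each is just partial applied to _set_op with a different operator (ops.or_, ops.sub, ops.and_) pre-bound; *sets and labels stay free. The same idea with plain Python sets (ArtifactSet and provenance's helpers left out):

import operator as ops
from functools import reduce
from toolz import partial

def _set_op(operator, *sets):
    # Fold the binary operator over the given sets.
    return reduce(operator, sets)

set_union = partial(_set_op, ops.or_)
set_intersection = partial(_set_op, ops.and_)

print(set_union({1, 2}, {2, 3}, {3, 4}))    # {1, 2, 3, 4}
print(set_intersection({1, 2}, {2, 3}))     # {2}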
github ethereum/trinity/evm/logic/push.py
def push_XX(message, state, storage, size):
    raw_value = state.code.read_raw(size)
    padded_value = pad_right(raw_value, size, b'\x00')

    logger.info('PUSH%s: %s', size, padded_value)
    state.stack.push(padded_value)

    state.consume_gas(COST_VERYLOW)


push1 = partial(push_XX, size=1)
push2 = partial(push_XX, size=2)
push3 = partial(push_XX, size=3)
push4 = partial(push_XX, size=4)
push5 = partial(push_XX, size=5)
push6 = partial(push_XX, size=6)
push7 = partial(push_XX, size=7)
push8 = partial(push_XX, size=8)
push9 = partial(push_XX, size=9)
push10 = partial(push_XX, size=10)
push11 = partial(push_XX, size=11)
push12 = partial(push_XX, size=12)
push13 = partial(push_XX, size=13)
push14 = partial(push_XX, size=14)
push15 = partial(push_XX, size=15)
push16 = partial(push_XX, size=16)
push17 = partial(push_XX, size=17)
push18 = partial(push_XX, size=18)
push19 = partial(push_XX, size=19)
push20 = partial(push_XX, size=20)
push21 = partial(push_XX, size=21)
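Each PUSH opcode handler is the same function with a different size keyword fixed, which is exactly what partial is for. The explicit assignments above keep every handler importable by name; the same family could be sketched with a comprehension (illustrative, not how trinity defines them):

from toolz import partial

def push_XX(message, state, storage, size):
    ...  # see the handler body above

# One pre-bound handler per operand width shown above.
push_handlers = {n: partial(push_XX, size=n) for n in range(1, 22)}
push3_again = push_handlers[3]  # equivalent to push3 above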
github rmax/databrewer/src/databrewer/cli.py
def cli_files(obj, name_spec):
    index = SearchIndex(obj['rc']['index_dir'])
    name = name_spec.partition('[')[0]
    recipe = index.get(name)
    if not recipe:
        _fail("Recipe '%s' not found" % name)

    datasets_dir = obj['rc']['datasets_dir']
    get_location = toolz.partial(os.path.join, datasets_dir, name)

    files = list(recipes.match_files(recipe, name_spec))
    if files:
        for spec in files:
            click.echo(get_location(spec['filename']))
    else:
        _fail("File '%s' not found" % name_spec)
github pytoolz/toolz/toolz/curried/__init__.py
excepts = toolz.curry(toolz.excepts)
filter = toolz.curry(toolz.filter)
get = toolz.curry(toolz.get)
get_in = toolz.curry(toolz.get_in)
groupby = toolz.curry(toolz.groupby)
interpose = toolz.curry(toolz.interpose)
itemfilter = toolz.curry(toolz.itemfilter)
itemmap = toolz.curry(toolz.itemmap)
iterate = toolz.curry(toolz.iterate)
join = toolz.curry(toolz.join)
keyfilter = toolz.curry(toolz.keyfilter)
keymap = toolz.curry(toolz.keymap)
map = toolz.curry(toolz.map)
mapcat = toolz.curry(toolz.mapcat)
nth = toolz.curry(toolz.nth)
partial = toolz.curry(toolz.partial)
partition = toolz.curry(toolz.partition)
partition_all = toolz.curry(toolz.partition_all)
partitionby = toolz.curry(toolz.partitionby)
peekn = toolz.curry(toolz.peekn)
pluck = toolz.curry(toolz.pluck)
random_sample = toolz.curry(toolz.random_sample)
reduce = toolz.curry(toolz.reduce)
reduceby = toolz.curry(toolz.reduceby)
remove = toolz.curry(toolz.remove)
sliding_window = toolz.curry(toolz.sliding_window)
sorted = toolz.curry(toolz.sorted)
tail = toolz.curry(toolz.tail)
take = toolz.curry(toolz.take)
take_nth = toolz.curry(toolz.take_nth)
topk = toolz.curry(toolz.topk)
unique = toolz.curry(toolz.unique)
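The toolz.curried namespace wraps most of the library in toolz.curry, and in the listing above partial itself is wrapped as well. curry differs from partial in that a curried function called with too few arguments returns another curried function instead of raising, whereas partial binds a fixed set of arguments in one step. A short comparison using toolz.get, which takes an index and a sequence:

from toolz import curry, get, partial

first = curry(get)(0)                # not enough arguments yet, so curry waits
print(first(['a', 'b', 'c']))        # 'a'

plain_first = partial(get, 0)        # partial binds eagerly in a single step
print(plain_first(['a', 'b', 'c']))  # 'a'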