How to use the `future.utils.viewvalues` function from the `future` Python 2/3 compatibility library

To help you get started, we’ve selected a few future examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github dedupeio / dedupe / tests / test_blocking.py View on Github external
def test_unconstrained_inverted_index(self):
        """A TF-IDF text-search predicate with a 0.0 threshold should
        block records 120/125 and 130/135 together."""
        predicate = dedupe.predicates.TfidfTextSearchPredicate(0.0, "name")
        blocker = dedupe.blocking.Blocker([predicate])

        # Index the distinct "name" values across every record.
        names = {record["name"] for record in viewvalues(self.data_d)}
        blocker.index(names, "name")

        # Group record ids by the block key the blocker assigns them.
        grouped = defaultdict(set)
        for block_key, record_id in blocker(self.data_d.items()):
            grouped[block_key].add(record_id)

        # Only multi-record blocks are meaningful for comparison.
        multi_record_blocks = {frozenset(members)
                               for members in grouped.values()
                               if len(members) > 1}

        assert multi_record_blocks == \
            {frozenset([120, 125]), frozenset([130, 135])}
github dedupeio / dedupe / dedupe / blocking.py View on Github external
def indexAll(self, data_d):
        """Index every configured field over all records in *data_d*.

        For each field in ``self.index_fields``, gather the distinct
        truthy values of that field across the records and pass them to
        ``self.index``.
        """
        for field_name in self.index_fields:
            distinct_values = set()
            for record in viewvalues(data_d):
                value = record[field_name]
                if value:  # skip empty/missing field values
                    distinct_values.add(value)
            self.index(distinct_values, field_name)
github cea-sec / miasm / miasm / arch / arm / sem.py View on Github external
def is_pc_written(ir, instr_ir):
    """Return ``(True, dst)`` for the first assignment in *instr_ir*
    whose destination is a program-counter register of *ir*'s machine,
    otherwise ``(False, None)``.
    """
    # View of the machine's PC registers; membership test per assignment.
    pc_registers = viewvalues(ir.mn.pc)
    for assignment in instr_ir:
        destination = assignment.dst
        if destination in pc_registers:
            return True, destination
    return False, None
github cea-sec / miasm / miasm2 / analysis / outofssa.py View on Github external
def init_phis_merge_state(self):
        """
        Seed the coalescing state: every phi variable begins in a
        singleton merge class containing only itself.
        """
        for merged_var in viewvalues(self.phi_new_var):
            self.merge_state.setdefault(merged_var, {merged_var})
github pytorch / pytorch / caffe2 / python / data_parallel_model.py View on Github external
def _Broadcast(devices, model, net, param, use_nccl=False):
    """Broadcast *param* from the first device in *devices* to the rest.

    Uses a single NCCLBroadcast op when ``use_nccl`` is set and the blob
    lives on GPU; otherwise falls back to per-device Copy ops.
    """
    # Copy params from gpu_0 to other
    master_dev = devices[0]

    if use_nccl:
        if _IsGPUBlob(model, param):
            master_device_opt = core.DeviceOption(model._device_type, master_dev)
            with core.DeviceScope(master_device_opt):
                # Note that the root is the root _rank_ and not the root
                # _device_. Thus we always use root=0, regardless of the
                # devices used.
                # The same blob list is passed as both inputs and outputs,
                # i.e. the broadcast overwrites each device's copy in place.
                net.NCCLBroadcast(
                    list(viewvalues(model._device_grouped_blobs[param])),
                    list(viewvalues(model._device_grouped_blobs[param])),
                    root=0,
                )
                return
        # NOTE(review): if use_nccl is set but the blob is not on GPU, we
        # fall through to the Copy loop below — presumably intentional.

    for dev_idx in devices[1:]:
        # Choose a device option per target: GPU, IDEEP, or plain CPU.
        if _IsGPUBlob(model, param):
            device_opt = core.DeviceOption(workspace.GpuDeviceType, dev_idx)
        else:
            device_opt = core.DeviceOption(caffe2_pb2.IDEEP, 0) if _IsIDEEPBlob(model, param) else \
                core.DeviceOption(caffe2_pb2.CPU, 0)
        with core.DeviceScope(device_opt):
            # Copy the master device's blob into this device's blob.
            net.Copy(
                model._device_grouped_blobs[param][master_dev],
                model._device_grouped_blobs[param][dev_idx]
            )
github broadinstitute / gdctools / gdctools / gdc_dice.py View on Github external
def _write_counts(case_data, counts_file):
    '''
    Write case data as counts, return counting data for use in generating
    program counts.
    '''
    # First, put the case data into an easier format:
    # { 'TP' : {'BCR' : 10, '...': 15, ...},
    #   'TR' : {'Clinical' : 10, '...': 15, ...},
    #           ...}
    rdt = common.REPORT_DATA_TYPES
    # counts: sample_type -> Counter of report_type occurrences
    counts = defaultdict(Counter)
    # totals: report_type occurrences for the project's main tumor type only
    totals = Counter()
    for case in viewvalues(case_data):
        main_type = meta.tumor_code(meta.main_tumor_sample_type(case.proj_id)).symbol
        c_dict = case.case_data
        for sample_type in c_dict:
            for report_type in c_dict[sample_type]:
                counts[sample_type][report_type] += 1
                if sample_type == main_type:
                    totals[report_type] += 1

    # Now write the counts table
    with open(counts_file, 'w') as out:
        # Write header
        out.write("Sample Type\t" + "\t".join(rdt) + '\n')
        for code in counts:
            line = code + "\t"
            # Headers can use abbreviated data types
            line += "\t".join([str(counts[code][t]) for t in rdt]) + "\n"
            # NOTE(review): `line` is built but never written to `out`, and
            # the docstring promises a return value that never appears —
            # this snippet looks truncated; confirm against the full source.
github dedupeio / dedupe / dedupe / api.py View on Github external
def _checkData(self, data):
        """Validate *data*: reject an empty mapping, then run the data
        model's field check against one representative record."""
        if len(data) == 0:
            raise ValueError(
                'Dictionary of records is empty.')

        # Any record will do; grab the first one the values view yields.
        sample_record = next(iter(viewvalues(data)))
        self.data_model.check(sample_record)
github cea-sec / ivre / ivre / db / neo4j.py View on Github external
def _cleanup_record(cls, elt):
        for k, v in viewitems(elt):
            if isinstance(v, list) and len(v) == 1 and \
                    isinstance(v[0], dict) and \
                    all(x is None for x in viewvalues(v[0])):
                elt[k] = []

        cls.from_dbdict(cls._get_props(elt["elt"]))
        new_meta = {}
        if isinstance(elt["meta"], list):
            for rec in elt["meta"]:
                if rec["info"] is None and rec["link"] is None:
                    continue
                info = rec["info"] or {}
                info_props = cls._get_props(info)
                link = rec["link"] or {}
                link_tag = link.get("type",
                                    link.get("labels", [""])[0]).lower()
                link_props = cls._get_props(link)
                key = "%s%s" % (
                    "_".join(label
github pytorch / pytorch / caffe2 / python / experiment_util.py View on Github external
logdict['cumulative_input_count'] = input_count
        logdict['cumulative_batch_count'] = batch_count
        if delta_t > 0:
            logdict['inputs_per_sec'] = delta_count / delta_t
        else:
            logdict['inputs_per_sec'] = 0.0

        for k in sorted(viewkeys(additional_values)):
            logdict[k] = additional_values[k]

        # Write the headers if they are not written yet
        if self.headers is None:
            self.headers = list(viewkeys(logdict))
            self.logstr(",".join(self.headers))

        self.logstr(",".join(str(v) for v in viewvalues(logdict)))

        for logger in self.external_loggers:
            try:
                logger.log(logdict)
            except Exception as e:
                logging.warn(
                    "Failed to call ExternalLogger: {}".format(e), e)