How to use the six.itervalues function in six

To help you get started, we’ve selected a few six.itervalues examples, based on popular ways it is used in public projects.

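six.itervalues(d) returns an iterator over the values of the dictionary d on both Python 2 (where it calls d.itervalues()) and Python 3 (where it wraps d.values() in iter()), so the same loop runs unchanged on either version. A minimal sketch with a made-up dictionary:

import six

networks = {"eth0": {"vlan": 100}, "bond0": {"vlan": 200}}  # hypothetical data

# Yields the values lazily: d.itervalues() on Python 2, iter(d.values()) on Python 3.
for attrs in six.itervalues(networks):
    print(attrs["vlan"])

The companion helpers six.iterkeys() and six.iteritems() do the same for keys and (key, value) pairs, and several of the examples below use them alongside six.itervalues().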

github oVirt / vdsm / tests / functional / utils.py
def _vlanInRunningConfig(self, devName, vlanId):
        for attrs in six.itervalues(self.config.networks):
            if (int(vlanId) == attrs.get('vlan') and
                    (attrs.get('bonding') == devName or
                     attrs.get('nic') == devName)):
                return True
        return False
github Chilipp / psyplot / psyplot / project.py
fmto.remove()
                        except Exception:
                            pass
                else:
                    plt.close(arr.psy.plotter.ax.get_figure().number)
                arr.psy.plotter = None
            if data:
                self.remove(arr)
                if not self.is_main:
                    try:
                        self.main.remove(arr)
                    except ValueError:  # arr not in list
                        pass
            if close_ds:
                if isinstance(arr, InteractiveList):
                    for ds in [val['ds'] for val in six.itervalues(
                               arr._get_ds_descriptions(
                                    arr.array_info(ds_description=['ds'],
                                                   standardize_dims=False)))]:
                        ds.close()
                else:
                    arr.psy.base.close()
        if self.is_main and self is gcp(True) and data:
            scp(None)
        elif self.is_main and self.is_cmp:
            self.oncpchange.emit(self)
        elif self.main.is_cmp:
            self.oncpchange.emit(self.main)
github aetros / aetros-cli / aetros / commands / ServerCommand.py
def stop(self):
        self.active = False

        self.logger.warning('Killing %d jobs ' % (len(self.job_processes),))

        for p in six.itervalues(self.job_processes):
            p.send_signal(signal.SIGINT)

        for p in six.itervalues(self.job_processes):
            p.wait()

        self.general_logger_stdout.flush()
        self.general_logger_stderr.flush()
        self.server.close()
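One detail the snippet above depends on: six.itervalues() hands back a one-shot iterator on both Python versions, so it is called once per pass (first to send the signal, then to wait on each process) rather than stored and reused. A small illustration with a throwaway dict standing in for the job processes:

import six

jobs = {"a": 1, "b": 2}  # stand-in for a dict of running processes

values = six.itervalues(jobs)
print(list(values))  # [1, 2] (in the dict's iteration order)
print(list(values))  # []  -- exhausted; call six.itervalues(jobs) again for a second pass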
github GoogleContainerTools / distroless / package_manager / parse_metadata.py
+ current_key + "\n" + current_entry)
                current_entry[current_key] = value.strip()
            else:
                raise Exception("Valid line, but no delimiter or indentation:"
                                + line)
        else:
            if current_entry:
                parsed_entries[current_entry[INDEX_KEY]] = current_entry
            current_entry = {}
            current_key = None
    if current_entry:
        parsed_entries[current_entry[INDEX_KEY]] = current_entry
    # The Filename Key is a relative url pointing to the .deb package
    # Here, we're rewriting the metadata with the absolute urls,
    # which is a concatenation of the mirror + '/debian/' + relative_path
    for pkg_data in six.itervalues(parsed_entries):
        if package_prefix:
            pkg_data[FILENAME_KEY] = package_prefix + pkg_data[FILENAME_KEY]
        else:
            pkg_data[FILENAME_KEY] = mirror_url + "/debian/" + snapshot + "/" + pkg_data[FILENAME_KEY]
    return parsed_entries
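The loop above mutates each value dict in place while iterating, which is safe; adding or removing keys of the dict being iterated, by contrast, raises RuntimeError on both Python versions. A sketch with hypothetical package data and a placeholder mirror URL:

import six

# Hypothetical parsed metadata keyed by package name.
parsed_entries = {
    "foo": {"Filename": "pool/main/f/foo/foo_1.0_amd64.deb"},
    "bar": {"Filename": "pool/main/b/bar/bar_2.0_amd64.deb"},
}
mirror_url = "https://snapshot.example.org"  # placeholder, not the real mirror
snapshot = "20200101T000000Z"                # placeholder snapshot id

# Rewriting each value in place is fine; inserting or deleting keys of
# parsed_entries inside this loop would raise RuntimeError.
for pkg_data in six.itervalues(parsed_entries):
    pkg_data["Filename"] = mirror_url + "/debian/" + snapshot + "/" + pkg_data["Filename"]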
github blei-lab / edward / edward / inferences / sghmc.py
def build_update(self):
    """Simulate Hamiltonian dynamics with friction using a discretized
    integrator. Its discretization error goes to zero as the learning
    rate decreases.

    Implements the update equations from (15) of @chen2014stochastic.
    """
    old_sample = {z: tf.gather(qz.params, tf.maximum(self.t - 1, 0))
                  for z, qz in six.iteritems(self.latent_vars)}
    old_v_sample = {z: v for z, v in six.iteritems(self.v)}

    # Simulate Hamiltonian dynamics with friction.
    learning_rate = self.step_size * 0.01
    grad_log_joint = tf.gradients(self._log_joint(old_sample),
                                  list(six.itervalues(old_sample)))

    # v_sample is so named b/c it represents a velocity rather than momentum.
    sample = {}
    v_sample = {}
    for z, grad_log_p in zip(six.iterkeys(old_sample), grad_log_joint):
      qz = self.latent_vars[z]
      event_shape = qz.event_shape
      stddev = tf.sqrt(tf.cast(learning_rate * self.friction, qz.dtype))
      normal = tf.random_normal(event_shape, dtype=qz.dtype)
      sample[z] = old_sample[z] + old_v_sample[z]
      v_sample[z] = ((1.0 - 0.5 * self.friction) * old_v_sample[z] +
                     learning_rate * tf.convert_to_tensor(grad_log_p) +
                     stddev * normal)

    # Update Empirical random variables.
    assign_ops = []
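The update above leans on a documented dict guarantee: as long as the dictionary is not modified in between, keys and values are yielded in corresponding order, so zipping six.iterkeys(old_sample) against gradients computed from list(six.itervalues(old_sample)) pairs each gradient with the right variable. A minimal illustration with plain numbers standing in for TensorFlow tensors:

import six

params = {"w": 1.0, "b": -2.0}

values = list(six.itervalues(params))   # values in iteration order
fake_grads = [2.0 * v for v in values]  # stand-in for tf.gradients(...)

# iterkeys() yields keys in the same order the values came out above,
# so each name is paired with its own gradient.
for name, grad in zip(six.iterkeys(params), fake_grads):
    print(name, grad)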
github tensorflow / data-validation / tensorflow_data_validation / statistics / stats_impl.py
for feature_stats_proto in stats_proto.features:
      feature_path = types.FeaturePath.from_proto(feature_stats_proto.path)
      if feature_path not in stats_per_feature:
        # Make a copy for the "cache" since we are modifying it in 'else' below.
        new_feature_stats_proto = statistics_pb2.FeatureNameStatistics()
        new_feature_stats_proto.CopyFrom(feature_stats_proto)
        stats_per_feature[feature_path] = new_feature_stats_proto
      else:
        stats_for_feature = stats_per_feature[feature_path]
        # MergeFrom would concatenate repeated fields which is not what we want
        # for path.step.
        del stats_for_feature.path.step[:]
        stats_for_feature.MergeFrom(feature_stats_proto)

  num_examples = None
  for feature_stats_proto in six.itervalues(stats_per_feature):
    # Add the merged FeatureNameStatistics proto for the feature
    # into the DatasetFeatureStatistics proto.
    new_feature_stats_proto = result.features.add()
    new_feature_stats_proto.CopyFrom(feature_stats_proto)

    # Get the number of examples from one of the features that
    # has common stats.
    if num_examples is None:
      stats_type = feature_stats_proto.WhichOneof('stats')
      stats_proto = None
      if stats_type == 'num_stats':
        stats_proto = feature_stats_proto.num_stats
      else:
        stats_proto = feature_stats_proto.string_stats

      if stats_proto.HasField('common_stats'):
github SteveDoyle2 / pyNastran / pyNastran / bdf / bdf_interface / cross_reference.py
for spline in itervalues(self.splines):
            spline.safe_cross_reference(self)
        for aecomp in itervalues(self.aecomps):
            aecomp.safe_cross_reference(self)
        for aelist in itervalues(self.aelists):
            aelist.safe_cross_reference(self)
        for aeparam in itervalues(self.aeparams):
            aeparam.safe_cross_reference(self)
        for aestat in itervalues(self.aestats):
            aestat.safe_cross_reference(self)
        #for aesurf in itervalues(self.aesurf):
            #aesurf.safe_cross_reference(self)
        for aesurfs in itervalues(self.aesurfs):
            aesurfs.safe_cross_reference(self)
        for flutter in itervalues(self.flutters):
            flutter.safe_cross_reference(self)

        if 0:  # only support CAERO1
            ncaeros = len(self.caeros)
            if ncaeros > 1:
                # we don't need to check the ncaeros=1 case
                i = 0
                min_maxs = zeros((ncaeros, 2), dtype='int32')
                for eid, caero in sorted(iteritems(self.caeros)):
                    min_maxs[i, :] = caero.min_max_eid
                    i += 1
                isort = argsort(min_maxs.ravel())
                expected = arange(ncaeros * 2, dtype='int32')
                if not array_equal(isort, expected):
                    msg = 'CAERO element ids are inconsistent\n'
                    msg += 'isort = %s' % str(isort)
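In the snippet above, itervalues and iteritems appear as bare names rather than six.itervalues / six.iteritems, which assumes they were imported directly from six, along the lines of:

from six import iteritems, itervalues  # same helpers, brought into the local namespace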
github TUNE-Archive / freight_forwarder / freight_forwarder / commercial_invoice / commercial_invoice.py
def _configure_service_dependencies(self, services):
        """
        """
        if not isinstance(services, dict):
            raise AttributeError("services is required to be a dict")

        for name, service in six.iteritems(services):
            if not isinstance(service, Service):
                raise AttributeError("{0} must be a Service.".format(name))

        for service in six.itervalues(services):
            reduced_services = services.copy()
            try:
                del reduced_services[service.name]
            except KeyError:
                pass

            service.configure_dependencies(reduced_services)
github deepmind / pysc2 / pysc2 / bin / gen_actions.py
def generate_py_abilities(data):
  """Generate the list of functions in actions.py."""
  def print_action(func_id, name, func, ab_id, general_id):
    args = [func_id, '"%s"' % name, func, ab_id]
    if general_id:
      args.append(general_id)
    print("    Function.ability(%s)," % ", ".join(str(v) for v in args))

  func_ids = itertools.count(12)  # Leave room for the ui funcs.
  for ability in sorted(six.itervalues(data.abilities),
                        key=lambda a: sort_key(data, a)):
    ab_id = ability.ability_id
    if ab_id in skip_abilities or (ab_id not in data.general_abilities and
                                   ab_id not in used_abilities):
      continue

    name = generate_name(ability).replace(" ", "_")

    if ability.target in (sc_data.AbilityData.Target.Value("None"),
                          sc_data.AbilityData.PointOrNone):
      print_action(next(func_ids), name + "_quick", "cmd_quick", ab_id,
                   ability.remaps_to_ability_id)
    if ability.target != sc_data.AbilityData.Target.Value("None"):
      print_action(next(func_ids), name + "_screen", "cmd_screen", ab_id,
                   ability.remaps_to_ability_id)
      if ability.allow_minimap:
github google / deepvariant / third_party / nucleus / util / ranges.py
def __len__(self):
    """Gets the number of ranges used by this RangeSet."""
    return sum(len(for_chr) for for_chr in six.itervalues(self._by_chr))