How to use the six.iterkeys function in six

To help you get started, we’ve selected a few six.iterkeys examples based on popular ways it is used in public projects.

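If you are new to the function itself: six.iterkeys(d) returns an iterator over a mapping's keys, calling d.iterkeys() on Python 2 and d.keys() on Python 3, so the same code stays lazy on both versions. A minimal sketch of that behaviour, using a made-up dictionary:

import six

config = {'host': 'localhost', 'port': 8080, 'debug': True}

# six.iterkeys(d) is d.iterkeys() on Python 2 and d.keys() on Python 3;
# either way it yields keys lazily instead of building a list up front.
for key in six.iterkeys(config):
    print(key)

# Materialize the keys when a concrete collection is needed.
sorted_keys = sorted(six.iterkeys(config))  # ['debug', 'host', 'port']

The examples below show how real projects use the same call.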

Example from tensorflow/lingvo (lingvo/core/py_utils.py)
Args:
    loss_metric_weight_pairs: a list of (metrics, weight) pairs, where each
      weight is a float and each metrics is a dict with str keys and
      (metric_value, target_weight) values.

  Returns:
    A dict with the same set of keys as input metrics and values of
    (weighted_sum(metric_value), weighted_sum(target_weight)).

  Raises:
    ValueError: if there exists a metric that exists in more than one element
      of `loss_metric_weight_pairs` but not in all of them.
  """
  all_keys = set([
      k for loss_metrics, _ in loss_metric_weight_pairs
      for k in six.iterkeys(loss_metrics)
  ])
  result = {}
  for k in all_keys:
    count = 0
    for loss_metrics, weight in loss_metric_weight_pairs:
      if k in loss_metrics:
        count += 1
    if count > 1 and count != len(loss_metric_weight_pairs):
      raise ValueError('Found metric %s which exists in more than one '
                       'but not all loss metrics.' % k)

    total_val = 0
    total_target_weight = 0
    for loss_metrics, weight in loss_metric_weight_pairs:
      if k in loss_metrics:
        val, target_weight = loss_metrics[k]
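The lingvo snippet above uses six.iterkeys to collect the union of metric names across several dicts before combining them. Stripped of the weighting logic, that key-union step can be sketched on its own like this (the metric dicts are invented for illustration):

import six

metrics_a = {'loss': (0.7, 1.0), 'accuracy': (0.91, 1.0)}
metrics_b = {'loss': (0.5, 2.0)}

# Union of keys across several metric dicts, iterating each one lazily.
all_keys = set(
    k for metrics in (metrics_a, metrics_b) for k in six.iterkeys(metrics))
assert all_keys == {'loss', 'accuracy'}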
Example from blei-lab/edward (edward/inferences/inference.py)
def _set_log_variables(self, log_vars=None):
    """Log variables to TensorBoard.

    For each variable in `log_vars`, forms a `tf.summary.scalar` if
    the variable has scalar shape; otherwise forms a `tf.summary.histogram`.

    Args:
      log_vars: list.
        Specifies the list of variables to log after each `n_print`
        steps. If None, will log all variables. If `[]`, no variables
        will be logged.
    """
    if log_vars is None:
      log_vars = []
      for key in six.iterkeys(self.data):
        log_vars += get_variables(key)

      for key, value in six.iteritems(self.latent_vars):
        log_vars += get_variables(key)
        log_vars += get_variables(value)

      log_vars = set(log_vars)

    for var in log_vars:
      # replace colons which are an invalid character
      var_name = var.name.replace(':', '/')
      # Log all scalars.
      if len(var.shape) == 0:
        tf.summary.scalar("parameter/{}".format(var_name),
                          var, collections=[self._summary_key])
      elif len(var.shape) == 1 and var.shape[0] == 1:
Example from inducer/loopy (loopy/kernel/__init__.py)
def all_variable_names(self, include_temp_storage=True):
        return (
                set(six.iterkeys(self.temporary_variables))
                | set(tv.base_storage
                    for tv in six.itervalues(self.temporary_variables)
                    if tv.base_storage is not None and include_temp_storage)
                | set(six.iterkeys(self.substitutions))
                | set(arg.name for arg in self.args)
                | set(self.all_inames()))
Example from DataDog/integrations-core (consul/datadog_checks/consul/consul.py)
self.warning('More than %d services in whitelist. Service list will be truncated.', self.max_services)

            whitelisted_services = [s for s in services if s in self.service_whitelist]
            services = {s: services[s] for s in whitelisted_services[: self.max_services]}
        else:
            if len(services) <= self.max_services:
                log_line = 'Consul service whitelist not defined. Agent will poll for all {} services found'.format(
                    len(services)
                )
                self.log.debug(log_line)
            else:
                log_line = 'Consul service whitelist not defined. Agent will poll for at most {} services'.format(
                    self.max_services
                )
                self.warning(log_line)
                services = {s: services[s] for s in list(islice(iterkeys(services), 0, self.max_services))}

        return services
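The truncation branch above pairs itertools.islice with iterkeys so that at most max_services keys are consumed, without first building the full key list. A self-contained sketch of the same idiom, with invented service names and a made-up limit:

from itertools import islice

from six import iterkeys

services = {'web': {}, 'db': {}, 'cache': {}, 'queue': {}}
max_services = 2

# Take at most `max_services` keys lazily, then rebuild a capped dict.
services = {s: services[s] for s in islice(iterkeys(services), 0, max_services)}
assert len(services) == max_services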
Example from apple/turicreate (src/python/turicreate/toolkits/recommender/util.py)
def __prepare_dataset_parameter(self, dataset):
        """
        Processes the dataset parameter for type correctness.
        Returns it as an SFrame.
        """

        # Translate the dataset argument into the proper type
        if not isinstance(dataset, _SFrame):
            def raise_dataset_type_exception():
                raise TypeError("The dataset parameter must be either an SFrame, "
                                "or a dictionary of (str : list) or (str : value).")

            if type(dataset) is dict:
                if not all(type(k) is str for k in _six.iterkeys(dataset)):
                    raise_dataset_type_exception()

                if all(type(v) in (list, tuple, _array.array) for v in _six.itervalues(dataset)):
                    dataset = _SFrame(dataset)
                else:
                    dataset = _SFrame({k : [v] for k, v in _six.iteritems(dataset)})
            else:
                raise_dataset_type_exception()

        return dataset
Example from jaegertracing/jaeger-client-python (jaeger_client/metrics/metrics.py)
def _get_key(self, name, tags=None):
        if not tags:
            return name
        key = name
        for k in sorted(six.iterkeys(tags)):
            key = key + '.' + str(k) + '_' + str(tags[k])
        return key
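The sort in that method is what makes the generated key deterministic: six.iterkeys follows the dict's own iteration order, so two tag dicts built in different orders could otherwise yield different metric names. A small illustration with invented tags:

import six

tags_a = {'service': 'api', 'region': 'eu'}
tags_b = {'region': 'eu', 'service': 'api'}  # same tags, different insertion order

# Sorting the keys gives the same suffix for both dicts.
suffix_a = '.'.join('{}_{}'.format(k, tags_a[k]) for k in sorted(six.iterkeys(tags_a)))
suffix_b = '.'.join('{}_{}'.format(k, tags_b[k]) for k in sorted(six.iterkeys(tags_b)))
assert suffix_a == suffix_b == 'region_eu.service_api'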
Example from oracle/oci-python-sdk (src/oci/identity/identity_client.py)
server error without risk of executing that same action again. Retry tokens expire after 24
            hours, but can be invalidated before then due to conflicting operations (e.g., if a resource
            has been deleted and purged from the system, then a retry of the original creation request
            may be rejected).

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.identity.models.SwiftPassword`
        :rtype: :class:`~oci.response.Response`
        """
        resource_path = "/users/{userId}/swiftPasswords/"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "opc_retry_token"
        ]
        extra_kwargs = [key for key in six.iterkeys(kwargs) if key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "create_swift_password got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "userId": user_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
Example from zylo117/tensorflow-gpu-macosx (tensorflow/python/util/nest.py)
"The two structures don't have the same sequence type. Input "
            "structure has type %s, while shallow structure has type %s."
            % (type(input_tree), type(shallow_tree)))

    if len(input_tree) != len(shallow_tree):
      raise ValueError(
          "The two structures don't have the same sequence length. Input "
          "structure has length %s, while shallow structure has length %s."
          % (len(input_tree), len(shallow_tree)))

    if check_types and isinstance(shallow_tree, dict):
      if set(input_tree) != set(shallow_tree):
        raise ValueError(
            "The two structures don't have the same keys. Input "
            "structure has keys %s, while shallow structure has keys %s." %
            (list(_six.iterkeys(input_tree)),
             list(_six.iterkeys(shallow_tree))))

      input_tree = list(sorted(_six.iteritems(input_tree)))
      shallow_tree = list(sorted(_six.iteritems(shallow_tree)))

    for shallow_branch, input_branch in zip(shallow_tree, input_tree):
      assert_shallow_structure(shallow_branch, input_branch,
                               check_types=check_types)
Example from googleapis/google-auth-library-python (google/auth/_service_account_info.py)
Args:
        data (Mapping[str, str]): The service account data
        require (Sequence[str]): List of keys required to be present in the
            info.

    Returns:
        google.auth.crypt.Signer: A signer created from the private key in the
            service account file.

    Raises:
        ValueError: if the data was in the wrong format, or if one of the
            required keys is missing.
    """
    keys_needed = set(require if require is not None else [])

    missing = keys_needed.difference(six.iterkeys(data))

    if missing:
        raise ValueError(
            "Service account info was not in the expected format, missing "
            "fields {}.".format(", ".join(missing))
        )

    # Create a signer.
    signer = crypt.RSASigner.from_service_account_info(data)

    return signer
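The required-field check in that helper is another recurring pattern: take the set of keys you need, subtract the keys actually present (via six.iterkeys), and report whatever is missing. A stripped-down sketch with made-up field names:

import six

def check_required(data, require=None):
    # Report any required keys that are absent from the mapping.
    keys_needed = set(require if require is not None else [])
    missing = keys_needed.difference(six.iterkeys(data))
    if missing:
        raise ValueError(
            'Missing fields: {}.'.format(', '.join(sorted(missing))))

check_required({'client_email': 'x', 'private_key': 'y'},
               require=['client_email', 'private_key', 'token_uri'])
# ValueError: Missing fields: token_uri.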