How to use the conda.common.compat.itervalues function in conda

To help you get started, we’ve selected a few conda examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

github conda / conda / tests / data / View on Github external


    # NOTE(review): scraped excerpt from a repodata-patching script in conda's
    # test data. The names `keep`, `keep_list`, `r6json`, `_keep`, and
    # `missing_in_whitelist` are defined outside this excerpt.
    all_package_names = set(info['name'] for info in itervalues(keep))
    for fn, info in r6json['packages'].items():
        if info['name'] in keep_list:
            _keep[fn] = info
            for dep in info['depends']:
                dep = dep.split()[0]
                if dep not in keep_list and dep not in all_package_names:
                    # NOTE(review): the statement under this `if` was lost in
                    # extraction (presumably it records `dep` into
                    # `missing_in_whitelist`) — not valid Python as-is;
                    # recover from the upstream repository before editing.
    if missing_in_whitelist:
        print(">>> missing 6 <<<")

    # patch 'perl-*' to include an explicit dependency on perl, as from the 'perl-threaded' package
    perl_info_dicts = tuple(info for info in _keep.values() if info['name'].startswith('perl-'))
    for info in perl_info_dicts:
        if not any(dep.startswith("perl ") for dep in info['depends']):
            info['depends'].append('perl 5.22.0*')
github conda / conda / conda / core / View on Github external
def _verify_transaction_level(prefix_setups):
        # NOTE(review): scraped excerpt from conda.core — the 8-space body
        # indent suggests this is really a method/staticmethod whose class
        # context was dropped by the scraper. Several tokens were also lost
        # in extraction (see notes below); not valid Python as-is.
        # 1. make sure we're not removing conda from conda's env
        # 2. make sure we're not removing a conda dependency from conda's env
        # 3. enforce context.disallowed_packages
        # 4. make sure we're not removing pinned packages without no-pin flag
        # 5. make sure conda-meta/history for each prefix is writable
        # TODO: Verification 4

        conda_prefixes = (join(context.root_prefix, 'envs', '_conda_'), context.root_prefix)
        conda_setups = tuple(setup for setup in itervalues(prefix_setups)
                             if setup.target_prefix in conda_prefixes)

        # NOTE(review): the attribute lookup before `==` was dropped here —
        # presumably `prec.name` — TODO confirm against upstream.
        conda_unlinked = any( == 'conda'
                             for setup in conda_setups
                             for prec in setup.unlink_precs)

        conda_prec, conda_final_setup = next(
            ((prec, setup)
             for setup in conda_setups
             for prec in setup.link_precs
             if == 'conda'),
            (None, None)
        # NOTE(review): the closing `)` of this next(...) call is missing —
        # truncated by the scraper.

        if conda_unlinked and conda_final_setup is None:
            # means conda is being unlinked and not re-linked anywhere
            # NOTE(review): the remainder of this function was cut off.
github conda / conda / conda / core / View on Github external
def _get_pfe(self):
        """Return the ProgressiveFetchExtract instance for this transaction,
        creating and caching it on first use.

        Returns the cached ``self._pfe`` when one already exists; an empty
        ProgressiveFetchExtract when there are no prefix setups; otherwise one
        covering the union of link-target records across all prefix setups.
        """
        from .package_cache_data import ProgressiveFetchExtract
        if self._pfe is not None:
            pfe = self._pfe
        elif not self.prefix_setups:
            self._pfe = pfe = ProgressiveFetchExtract(())
        else:
            # FIX(review): the `else:` line was missing from the excerpt, which
            # put the two statements below inside the `elif` branch — leaving
            # `pfe` unassigned (NameError at `return`) whenever `_pfe` is None
            # and `prefix_setups` is non-empty.
            link_precs = set(concat(stp.link_precs for stp in itervalues(self.prefix_setups)))
            self._pfe = pfe = ProgressiveFetchExtract(link_precs)
        return pfe
github conda / conda / conda / base / View on Github external
def custom_channels(self):
        # NOTE(review): scraped excerpt of the conda context's
        # `custom_channels` property (class/decorator context dropped).
        # Extraction lost the module path after `from`, the first element of
        # the `odict` key/value tuple (presumably `x.name`), and the tail of
        # the `concatv(...)` expression — not valid Python as-is; recover
        # from the upstream repository before editing.
        from import Channel
        custom_channels = (Channel.make_simple_channel(self.channel_alias, url, name)
                           for name, url in iteritems(self._custom_channels))
        channels_from_multichannels = concat(channel for channel
                                             in itervalues(self.custom_multichannels))
        all_channels = odict((, x) for x in (ch for ch in concatv(
        return all_channels
github conda / conda / conda / View on Github external
# NOTE(review): scraped excerpt from conda's index-fetching code. The opening
# of the `try:` / ThreadPoolExecutor block, the iterable argument of `map()`,
# and the bodies of several `if` statements below were lost in extraction —
# not valid Python as-is; recover from the upstream repository before editing.
session=session), url)
                                         for url in reversed(channel_urls)])
            for future in future_to_url:
                url = future_to_url[future]
                repodatas.append((url, future.result()))
    except ImportError:
        # concurrent.futures is only available in Python 3
        repodatas = map(lambda url: (url, fetch_repodata(url,
                                     use_cache=use_cache, session=session)),

    for url, repodata in repodatas:
        if repodata is None:
            # NOTE(review): the body of this branch (presumably a `continue`)
            # was dropped by the scraper — TODO confirm against upstream.
        new_index = repodata['packages']
        for info in itervalues(new_index):
            info['channel'] = url
    if unknown:
    if config.add_pip_as_python_dependency:
    return index
github conda / conda / conda / cli / View on Github external

        # NOTE(review): scraped excerpt from conda.cli's config show/describe
        # handling (enclosing function header dropped). The closing of both
        # `print(json.dumps(...))` calls and the end of the custom_channels
        # dict comprehension were lost in extraction — not valid Python as-is.
        # Also note the pre-existing upstream typo `paramater_names`.
        from collections import OrderedDict

        d = OrderedDict((key, getattr(context, key))
                        for key in context.list_parameters())
        if context.json:
            print(json.dumps(d, sort_keys=True, indent=2, separators=(',', ': '),
            # coerce channels
            d['custom_channels'] = {
       "%s://%s" % (channel.scheme, channel.location)
                for channel in itervalues(d['custom_channels'])
            # TODO: custom_multichannels needs better formatting
            d['custom_multichannels'] = {k: json.dumps([text_type(c) for c in chnls])
                                         for k, chnls in iteritems(d['custom_multichannels'])}


    if args.describe:
        paramater_names = context.list_parameters()
        if context.json:
            print(json.dumps([context.describe_parameter(name) for name in paramater_names],
                             sort_keys=True, indent=2, separators=(',', ': '),
github conda / conda / conda / core / View on Github external
# NOTE(review): scraped excerpt from conda.core's solver. It begins
# mid-expression; extraction dropped attribute lookups before `==` and before
# `, s)` (presumably `dist.name` and `s.name` — TODO confirm upstream), the
# arguments of `IndexedSet(concatv(`, and the body of the final loop. Not
# valid Python as-is; recover from the upstream repository before editing.
and any( == "conda" for dist in solution)):
                specs_map["conda"] = MatchSpec("conda")

        # add in explicitly requested specs from specs_to_add
        # this overrides any name-matching spec already in the spec map
        specs_map.update((, s) for s in specs_to_add)

        # collect additional specs to add to the solution
        track_features_specs = pinned_specs = ()
        if context.track_features:
            track_features_specs = tuple(MatchSpec(x + '@') for x in context.track_features)
        if not ignore_pinned:
            pinned_specs = get_pinned_specs(self.prefix)

        final_environment_specs = IndexedSet(concatv(

        # We've previously checked `solution` for consistency (which at that point was the
        # pre-solve state of the environment). Now we check our compiled set of
        # `final_environment_specs` for the possibility of a solution.  If there are conflicts,
        # we can often avoid them by neutering specs that have a target (e.g. removing version
        # constraint) and also making them optional. The result here will be less cases of
        # `UnsatisfiableError` handed to users, at the cost of more packages being modified
        # or removed from the environment.
        conflicting_specs = r.get_conflicting_specs(tuple(final_environment_specs))
        if log.isEnabledFor(DEBUG):
            log.debug("conflicting specs: %s", dashlist(conflicting_specs))
        for spec in conflicting_specs:
github conda / conda / conda / models / View on Github external
def _topo_sort_handle_cycles(cls, graph):
        # NOTE(review): scraped excerpt of a classmethod (the @classmethod
        # decorator and enclosing class were dropped). Extraction lost the
        # body of the first for-loop (self-edge removal), the `key=` lambda
        # body and closing `)` of sorted(...), and the `try:` opening the
        # while-loop body — not valid Python as-is; the block also appears to
        # continue past this excerpt. Recover from upstream before editing.
        # remove edges that point directly back to the node
        for k, v in iteritems(graph):

        # disconnected nodes go first
        nodes_that_are_parents = set(node for parents in itervalues(graph) for node in parents)
        nodes_without_parents = (node for node in graph if not graph[node])
        disconnected_nodes = sorted(
            (node for node in nodes_without_parents if node not in nodes_that_are_parents),
            key=lambda x:
        for node in disconnected_nodes:
            yield node

        t = cls._toposort_raise_on_cycles(graph)

        while True:
                value = next(t)
                yield value
            except CyclicalDependencyError as e:
                # TODO: Turn this into a warning, but without being too annoying with
github conda / conda / conda / core / View on Github external

        # NOTE(review): scraped excerpt from conda.core's solver remove path
        # (enclosing method header dropped). Extraction lost attribute lookups
        # before `in specs_from_history_map` and inside `specs_map.get(`
        # (presumably `rec.name` in both — TODO confirm upstream), plus the
        # statement bodies under `for spec in specs_to_remove:` — not valid
        # Python as-is; recover from the upstream repository before editing.
        index, r = self._prepare(prepared_specs)

        if specs_to_remove:
            # In a previous implementation, we invoked SAT here via `r.remove()` to help with
            # spec removal, and then later invoking SAT again via `r.solve()`. Rather than invoking
            # SAT for spec removal determination, we can use the PrefixGraph and simple tree
            # traversal if we're careful about how we handle features. We still invoke sat via
            # `r.solve()` later.
            _track_fts_specs = (spec for spec in specs_to_remove if 'track_features' in spec)
            feature_names = set(concat(spec.get_raw_value('track_features')
                                       for spec in _track_fts_specs))
            graph = PrefixGraph((index[dist] for dist in solution), itervalues(specs_map))

            removed_records = []
            for spec in specs_to_remove:
                # If the spec was a track_features spec, then we need to also remove every
                # package with a feature that matches the track_feature. The
                # `graph.remove_spec()` method handles that for us.
                log.trace("using PrefixGraph to remove records for %s", spec)

            for rec in removed_records:
                # We keep specs (minus the feature part) for the non provides_features packages
                # if they're in the history specs.  Otherwise, we pop them from the specs_map.
                rec_has_a_feature = set(rec.features or ()) & feature_names
                if rec_has_a_feature and in specs_from_history_map:
                    spec = specs_map.get(, MatchSpec(
                    spec._match_components.pop('features', None)
github conda / conda / conda / core / View on Github external
def linked_data(prefix, ignore_channels=False):
    """Return a dictionary of the linked packages in prefix.

    Maps each installed package's Dist to its PrefixRecord, read from the
    prefix's conda-meta metadata via PrefixData.

    :param prefix: path to the conda environment to inspect.
    :param ignore_channels: accepted for interface compatibility; not used in
        this excerpt of the function.
    """
    # FIX(review): the summary line appeared as a bare, unquoted statement in
    # the scraped excerpt (a SyntaxError); it is restored as a real docstring.
    pd = PrefixData(prefix)
    return {Dist(prefix_record): prefix_record for prefix_record in itervalues(pd._prefix_records)}