How to use the conda.common.compat.iteritems function in conda

To help you get started, we’ve selected a few conda examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github conda / conda / tests / View on Github external
"build_number": 5,
            "depends": [
              "mkl-rt 11.0",
              "python 2.7*"
            "features": "mkl",
            "name": "numpy",
            "pub_date": "2013-04-29",
            "requires": [
              "mkl-rt 11.0",
              "python 2.7"
            "version": "1.6.2"

    index2 = {Dist(key): value for key, value in iteritems(index2)}
    r2 = Resolve(index2)

    # This should not pick any mkl packages (the difference here is that none
    # of the specs directly have mkl versions)
    assert r2.solve(['pandas 0.12.0 np16py27_0', 'python 2.7*'],
        returnall=True) == [[Dist(fname) for fname in [
github conda / conda / tests / View on Github external
def supplement_index_with_repodata(index, repodata, channel, priority):
    """Add every package record from a parsed repodata dict to *index*.

    Parameters
    ----------
    index : dict
        Mapping updated in place; each PackageRecord is stored as both
        key and value.
    repodata : dict
        Parsed ``repodata.json`` with ``'info'`` and ``'packages'`` sections.
    channel : Channel
        Source channel; its ``auth`` is attached to each record.
    priority : int
        Channel priority propagated onto each record.
    """
    repodata_info = repodata['info']
    arch = repodata_info.get('arch')
    platform = repodata_info.get('platform')
    subdir = repodata_info.get('subdir')
    if not subdir:
        # Older repodata lacks 'subdir'; synthesize it as "<platform>-<arch>".
        subdir = "%s-%s" % (repodata_info['platform'], repodata_info['arch'])
    auth = channel.auth
    for fn, info in iteritems(repodata['packages']):
        # NOTE(review): the scraped snippet was truncated mid-call; the
        # keyword arguments below are reconstructed from the locals computed
        # above -- confirm against the upstream conda implementation.
        rec = PackageRecord.from_objects(info,
                                         fn=fn,
                                         arch=arch,
                                         platform=platform,
                                         subdir=subdir,
                                         channel=channel,
                                         priority=priority,
                                         # schannel=schannel,
                                         # url=join_url(channel_url, fn),
                                         auth=auth)
        index[rec] = rec
github conda / conda / tests / core / View on Github external
def test_get_index_win64_platform(self):
    """Every record fetched for the win-64 subdir must be a win-64 package."""
    subdir = 'win-64'
    for _, rec in iteritems(get_index(platform=subdir)):
        # Each returned record's platform must match the requested subdir.
        assert platform_in_record(subdir, rec), (subdir, rec.url)
github conda / conda / conda / cli / View on Github external
def format_dict(d):
    """Render a (possibly nested) mapping as a list of display lines.

    Non-empty mappings and iterables get a "key:" header line followed by a
    pretty rendering; empty ones render inline as ``key: {}`` / ``key: []``;
    scalars render as ``key: value`` with ``None`` shown literally.

    NOTE(review): the scraped snippet had dropped the ``else:`` branches,
    leaving ``pretty_list``/``pretty_map`` imported but unused; they are
    restored here -- confirm against the upstream conda implementation.
    """
    from ..common.configuration import pretty_list, pretty_map

    lines = []
    for k, v in iteritems(d):
        if isinstance(v, collections.Mapping):
            if v:
                lines.append("%s:" % k)
                lines.append(pretty_map(v))
            else:
                # Empty mapping: show it inline rather than a bare header.
                lines.append("%s: {}" % k)
        elif isiterable(v):
            if v:
                lines.append("%s:" % k)
                lines.append(pretty_list(v))
            else:
                lines.append("%s: []" % k)
        else:
            lines.append("%s: %s" % (k, v if v is not None else "None"))
    return lines
github conda / conda / conda / common / View on Github external
If on linux, returns (libc_family, version), otherwise (None, None)

    if not sys.platform.startswith('linux'):
        return None, None

    from os import confstr, confstr_names, readlink

    # Python 2.7 does not have either of these keys in confstr_names, so provide
    # hard-coded defaults and assert if the key is in confstr_names but differs.
    # These are defined by POSIX anyway so should never change.
    confstr_names_fallback = OrderedDict([('CS_GNU_LIBC_VERSION', 2),
                                          ('CS_GNU_LIBPTHREAD_VERSION', 3)])

    val = None
    for k, v in iteritems(confstr_names_fallback):
        assert k not in confstr_names or confstr_names[k] == v, (
            "confstr_names_fallback for %s is %s yet in confstr_names it is %s"
            "" % (k, confstr_names_fallback[k], confstr_names[k])
            val = str(confstr(v))
        except:  # pragma: no cover
            if val:

    if not val:  # pragma: no cover
        # Weird, play it safe and assume glibc 2.5
        family, version = 'glibc', '2.5'
        log.warning("Failed to detect libc family and version, assuming %s/%s", family, version)
github conda / conda / conda / core / View on Github external
def print_transaction_summary(self):
        """Build the legacy action groups and report per-prefix changes.

        NOTE(review): this snippet is truncated by the scrape -- the
        ``_calculate_change_report`` call below is missing its trailing
        argument(s) and closing parenthesis, and the computed
        ``change_report_str`` is never emitted here; restore from upstream.
        """
        legacy_action_groups = self._make_legacy_action_groups()

        # URLs of everything the package fetch executor will download.
        download_urls = set(axn.url for axn in self._pfe.cache_actions)

        # Pair each legacy action group with its (prefix, setup) entry.
        for actions, (prefix, stp) in zip(legacy_action_groups, iteritems(self.prefix_setups)):
            change_report = self._calculate_change_report(prefix, stp.unlink_precs, stp.link_precs,
                                                          download_urls, stp.remove_specs,
            change_report_str = self._change_report_str(change_report)

        return legacy_action_groups
github conda / conda / conda / models / View on Github external
def merge(cls, match_specs):
        """Merge specs that name the same package into one combined spec.

        Specs whose name is ``'*'`` or has no exact value are passed through
        unmerged. Returns a tuple of the merged specs followed by the
        unmerged ones.

        NOTE(review): the scraped snippet dropped the closing ``}`` of the
        dict comprehension; it is restored here.
        """
        match_specs = tuple(cls(s) for s in match_specs)
        # Group by exact package name; '*' and unnamed specs can't be merged.
        grouped = groupby(lambda spec: spec.get_exact_value('name'), match_specs)
        dont_merge_these = grouped.pop('*', []) + grouped.pop(None, [])
        specs_map = {
            name: reduce(lambda x, y: x._merge(y), specs) if len(specs) > 1 else specs[0]
            for name, specs in iteritems(grouped)
        }
        return tuple(concatv(itervalues(specs_map), dont_merge_these))
github conda / conda / conda / core / View on Github external
log.debug("conflicting specs: %s", dashlist(conflicting_specs))
        for spec in conflicting_specs:
                neutered_spec = MatchSpec(,, optional=True)

        # Finally! We get to call SAT.
        if log.isEnabledFor(DEBUG):
            log.debug("final specs to add: %s",
                      dashlist(sorted(text_type(s) for s in final_environment_specs)))
        solution = r.solve(tuple(final_environment_specs))  # return value is List[dist]

        # add back inconsistent packages to solution
        if add_back_map:
            for name, (dist, spec) in iteritems(add_back_map):
                if not any( == name for d in solution):
                    if spec:

        # Special case handling for various DepsModifer flags. Maybe this block could be pulled
        # out into its own non-public helper method?
        if deps_modifier == DepsModifier.NO_DEPS:
            # In the NO_DEPS case, we need to start with the original list of packages in the
            # environment, and then only modify packages that match specs_to_add or
            # specs_to_remove.
            _no_deps_solution = IndexedSet(Dist(rec) for rec in prefix_data.iter_records())
            only_remove_these = set(dist
                                    for spec in specs_to_remove
                                    for dist in _no_deps_solution
                                    if spec.match(index[dist]))
github conda / conda / conda / models / View on Github external
def _toposort_raise_on_cycles(cls, graph):
        """Yield the nodes of *graph* in topological order; raise on a cycle.

        *graph* maps each node to the set of its parents and is consumed
        (mutated) during the sort.

        Raises
        ------
        CyclicalDependencyError
            If nodes remain after no parent-free node can be found.

        NOTE(review): the ``return``/``break`` statements, the sort ``key``
        expression, and the closing parentheses were lost in the scraped
        snippet and are reconstructed here -- confirm against upstream.
        """
        if not graph:
            return
        while True:
            # Nodes with no remaining parents are ready to emit; sort for
            # deterministic ordering.
            no_parent_nodes = IndexedSet(sorted(
                (node for node, parents in iteritems(graph) if len(parents) == 0),
                key=lambda x: x.name
            ))
            if not no_parent_nodes:
                break

            for node in no_parent_nodes:
                yield node
                graph.pop(node, None)

            # Remove the emitted nodes from every remaining parent set.
            for parents in itervalues(graph):
                parents -= no_parent_nodes

        if len(graph) != 0:
            # Everything left still has parents => dependency cycle.
            raise CyclicalDependencyError(tuple(graph))
github conda / conda / conda / View on Github external
slist = [ms]
            while slist:
                ms2 = slist.pop()
                deps = rec.setdefault(, set())
                for fkey in self.find_matches(ms2):
                    if fkey not in deps:
                        slist.extend(ms3 for ms3 in self.ms_depends(fkey) if !=
        # Find the list of dependencies they have in common. And for each of
        # *those*, find the individual packages that they all share. Those need
        # to be removed as conflict candidates.
        commkeys = set.intersection(*(set(s.keys()) for s in sdeps.values()))
        commkeys = {k: set.intersection(*(v[k] for v in sdeps.values())) for k in commkeys}
        # and find the dependency chains that lead to them.
        bad_deps = []
        for ms, sdep in iteritems(sdeps):
            filter = {}
            for mn, v in sdep.items():
                if mn != and mn in commkeys:
                    # Mark this package's "unique" dependencies as invali
                    for fkey in v - commkeys[mn]:
                        filter[fkey] = False
            # Find the dependencies that lead to those invalid choices
            ndeps = set(self.invalid_chains(ms, filter))
            # This may produce some additional invalid chains that we
            # don't care about. Select only those that terminate in our
            # predetermined set of "common" keys.
            ndeps = [nd for nd in ndeps if nd[-1].name in commkeys]
            if ndeps:
                # This means the package *itself* was the common conflict.