How to use conda.base.context.context in conda

To help you get started, we’ve selected a few conda examples based on popular ways context is used in public projects.
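
All of the examples below share the same basic pattern: import the context singleton from conda.base.context and read configuration attributes from it. A minimal sketch (assuming a conda installation; the attribute names are taken from the examples that follow):

# context is conda's global configuration singleton, not a function you call.
from conda.base.context import context

print(context.subdir)       # platform/arch tag, e.g. "linux-64"
print(context.platform)     # operating system name, e.g. "linux"
print(context.root_prefix)  # path of the base (root) environment
print(context.json)         # True when --json output was requested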


github conda / conda / tests / helpers.py
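A test helper that builds an in-memory repodata index, filling the metadata header from context.subdir, context.arch_name, and context.platform.
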
def get_index_r_1(subdir=context.subdir):
    with open(join(dirname(__file__), 'data', 'index.json')) as fi:
        packages = json.load(fi)
        repodata = {
            "info": {
                "subdir": subdir,
                "arch": context.arch_name,
                "platform": context.platform,
            },
            "packages": packages,
        }

    channel = Channel('https://conda.anaconda.org/channel-1/%s' % subdir)
    sd = SubdirData(channel)
    with env_var("CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol):
        sd._process_raw_repodata_str(json.dumps(repodata))
    sd._loaded = True
github conda / conda / tests / models / test_package_info.py
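A test that uses context.subdir to fill the subdir field of an IndexRecord when building a PackageInfo fixture.
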
    def test_package_info(self):
        index_json_record = IndexRecord(build=0, build_number=0, name="test_foo", version=0,
                                        channel='defaults', subdir=context.subdir, fn='doesnt-matter',
                                        md5='0123456789')
        icondata = "icondata"
        package_metadata = PackageMetadata(
            package_metadata_version=1,
            noarch=Noarch(type="python", entry_points=["test:foo"]),
        )

        paths = [PathData(_path="test/path/1", file_mode=FileMode.text, path_type=PathType.hardlink,
                          prefix_placeholder="/opt/anaconda1anaconda2anaconda3", ),
                 PathData(_path="test/path/2", no_link=True, path_type=PathType.hardlink),
                 PathData(_path="test/path/3", path_type=PathType.softlink),
                 PathData(_path="menu/test.json", path_type=PathType.hardlink)]
        paths_data = PathsData(paths_version=0, paths=paths)

        package_info = PackageInfo(
            extracted_package_dir='/some/path',
github conda / conda / conda / history.py
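An excerpt from conda's history check: it compares the running conda version against the minimum version recorded in the environment's history, and uses context.root_prefix to suggest the upgrade command when the target environment is not the base environment.
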
            minimum_conda_version = sorted(conda_versions_from_history, key=VersionOrder)[-1]
            minimum_major_minor = '.'.join(take(2, minimum_conda_version.split('.')))
            current_major_minor = '.'.join(take(2, CONDA_VERSION.split('.')))
            if VersionOrder(current_major_minor) < VersionOrder(minimum_major_minor):
                message = dals("""
                This environment has previously been operated on by a conda version that's newer
                than the conda currently being used. A newer version of conda is required.
                  target environment location: %(target_prefix)s
                  current conda version: %(conda_version)s
                  minimum conda version: %(minimum_version)s
                """) % {
                    "target_prefix": self.prefix,
                    "conda_version": CONDA_VERSION,
                    "minimum_version": minimum_major_minor,
                }
                if not paths_equal(self.prefix, context.root_prefix):
                    message += dedent("""
                    Update conda and try again.
                        $ conda install -p "%(base_prefix)s" "conda>=%(minimum_version)s"
                    """) % {
                        "base_prefix": context.root_prefix,
                        "minimum_version": minimum_major_minor,
                    }
                message += dedent("""
                To work around this restriction, one can also set the config parameter
                'allow_conda_downgrades' to False at their own risk.
                """)

                raise CondaUpgradeError(message)

        return res
github QuantStack / mamba / mamba / mamba.py
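mamba's command handling reads context.target_prefix, context.json, context.force_32bit, context.root_prefix, and context.update_modifier while validating create, install, and update invocations.
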
    check_non_admin()

    newenv = bool(command == 'create')
    isinstall = bool(command == 'install')
    solver_task = api.SOLVER_INSTALL

    isupdate = bool(command == 'update')
    if isupdate:
        solver_task = api.SOLVER_UPDATE

    if newenv:
        ensure_name_or_prefix(args, command)
    prefix = context.target_prefix
    if newenv:
        check_prefix(prefix, json=context.json)
    if context.force_32bit and prefix == context.root_prefix:
        raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env")
    if isupdate and not (args.file or args.packages
                         or context.update_modifier == UpdateModifier.UPDATE_ALL):
        raise CondaValueError("""no package names supplied
# If you want to update to a newer version of Anaconda, type:
#
# $ conda update --prefix %s anaconda
""" % prefix)

    if not newenv:
        if isdir(prefix):
            delete_trash(prefix)
            if not isfile(join(prefix, 'conda-meta', 'history')):
                if paths_equal(prefix, context.conda_prefix):
                    raise NoBaseEnvironmentError()
                else:
github conda / conda / conda / core / index.py
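context.json and context.quiet gate the progress messages printed while fetching package metadata.
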
    if not context.json and not context.quiet:
        stdoutlog.info("Fetching package metadata ...")

    CollectTask = namedtuple('CollectTask', ('url', 'schannel', 'priority'))
    tasks = [CollectTask(url, *cdata) for url, cdata in iteritems(channel_urls)]
    repodatas = collect_all_repodata(use_cache, tasks)
    # type: List[Sequence[str, Option[Dict[Dist, IndexRecord]]]]
    #   this is sorta a lie; actually more primitve types

    if index is None:
        index = {}
    for _, repodata in reversed(repodatas):
        if repodata:
            index.update(repodata.get('packages', {}))

    if not context.json:
        stdoutlog.info('\n')
    return index
github conda / conda / conda / core / link.py
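During link verification, the context object itself is passed into the clobber-error exceptions raised for conflicting paths.
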
                                context,
                            ))
                        else:
                            error_results.append(UnknownPackageClobberError(
                                path,
                                axn.package_info.repodata_record.dist_str(),
                                context,
                            ))

        # Verification 2. there's only a single instance of each path
        for path, axns in iteritems(link_paths_dict):
            if len(axns) > 1:
                error_results.append(SharedLinkPathClobberError(
                    path,
                    tuple(axn.package_info.repodata_record.dist_str() for axn in axns),
                    context,
                ))
        return error_results
github conda / conda / conda / cli / main_clean.py
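conda clean reads the source, git, hg, and svn cache directories from context and walks them to total up their sizes.
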
def find_source_cache():
    cache_dirs = {
        'source cache': context.src_cache,
        'git cache': context.git_cache,
        'hg cache': context.hg_cache,
        'svn cache': context.svn_cache,
    }

    sizes = {}
    totalsize = 0
    for cache_type, cache_dir in cache_dirs.items():
        dirsize = 0
        for root, d, files in walk(cache_dir):
            for fn in files:
                size = lstat(join(root, fn)).st_size
                totalsize += size
                dirsize += size
        sizes[cache_type] = dirsize

    return {
        'warnings': [],
github conda / conda / conda / activate.py
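The activator sets context.dev depending on whether a --dev flag is present in the remaining command-line arguments.
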
                'activate': ActivateHelp(),
                'deactivate': DeactivateHelp(),
                'hook': GenericHelp('hook'),
                'commands': GenericHelp('commands'),
                'reactivate': GenericHelp('reactivate'),
            }
            raise help_classes[command]
        elif command not in ('activate', 'deactivate', 'reactivate', 'hook', 'commands'):
            from .exceptions import ArgumentError
            raise ArgumentError("invalid command '%s'" % command)

        if command.endswith('activate') or command == 'hook':
            try:
                dev_idx = remainder_args.index('--dev')
            except ValueError:
                context.dev = False
            else:
                del remainder_args[dev_idx]
                context.dev = True

        if command == 'activate':
            try:
                stack_idx = remainder_args.index('--stack')
            except ValueError:
                self.stack = False
            else:
                del remainder_args[stack_idx]
                self.stack = True
            if len(remainder_args) > 1:
                from .exceptions import ArgumentError
                raise ArgumentError(command + ' does not accept more than one argument:\n'
                                    + str(remainder_args) + '\n')
github conda / conda / conda / cli / main_info.py
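conda info reads context.channels, context.json, context.collect_all(), and context.active_prefix while assembling its report.
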
    try:
        from conda_env import __version__ as conda_env_version
    except:  # pragma: no cover
        conda_env_version = "not installed"

    try:
        import conda_build
    except ImportError:  # pragma: no cover
        conda_build_version = "not installed"
    except Exception as e:  # pragma: no cover
        conda_build_version = "Error %s" % e
    else:  # pragma: no cover
        conda_build_version = conda_build.__version__

    channels = list(all_channel_urls(context.channels))
    if not context.json:
        channels = [c + ('' if offline_keep(c) else '  (offline)')
                    for c in channels]
    channels = [mask_anaconda_token(c) for c in channels]

    config_files = tuple(path for path in context.collect_all()
                         if path not in ('envvars', 'cmd_line'))

    netrc_file = os.environ.get('NETRC')
    if not netrc_file:
        user_netrc = expanduser("~/.netrc")
        if isfile(user_netrc):
            netrc_file = user_netrc

    active_prefix_name = env_name(context.active_prefix)
github conda / conda / conda / core / solve.py
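get_pinned_specs combines context.pinned_packages with the specs read from the environment's conda-meta/pinned file.
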
def get_pinned_specs(prefix):
    """Find pinned specs from file and return a tuple of MatchSpec."""
    pinfile = join(prefix, 'conda-meta', 'pinned')
    if exists(pinfile):
        with open(pinfile) as f:
            from_file = (i for i in f.read().strip().splitlines()
                         if i and not i.strip().startswith('#'))
    else:
        from_file = ()

    return tuple(MatchSpec(s, optional=True) for s in
                 concatv(context.pinned_packages, from_file))