diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 571fc5f04d6..8ba0806f74c 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -9,3 +9,6 @@
 # pyupgrade generated (#11909)
 fc92ea3353cce71df24fac31ce333ec9f8cf7ba7
+
+# black & isort auto format (#12554)
+7679aed18153056f140e56da98011e27ebf4feb1
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3db87c72642..c2323f53eec 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,10 +1,12 @@
 # disable autofixing PRs, commenting "pre-commit.ci autofix" on a pull request triggers an autofix
 ci:
-  autofix_prs: false
+  autofix_prs: false

 # generally speaking we ignore all vendored code as well as tests data
+# ignore patches/diffs since slight reformatting can break them
 exclude: |
   (?x)^(
     conda/_vendor |
+    conda/auxlib |
     tests/data/(
       conda_format_repo |
       env_metadata/.+ |
@@ -13,9 +15,11 @@ exclude: |
       tar_traversal |
       corrupt
     ) |
+    .*\.(patch|diff) |
     tools/vendoring/patches
   )/
 repos:
+  # generic verification and formatting
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: v4.4.0
     hooks:
@@ -29,30 +33,39 @@ repos:
         exclude: ^(conda\.)?recipe/meta.yaml
       # catch git merge/rebase problems
       - id: check-merge-conflict
+  # Python verification and formatting
+  - repo: https://github.com/Lucas-C/pre-commit-hooks
+    rev: v1.4.2
+    hooks:
+      # auto inject license blurb
+      - id: insert-license
+        files: \.py$
+        args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol]
   - repo: https://github.com/asottile/pyupgrade
     rev: v3.3.1
     hooks:
+      # upgrade standard Python code
       - id: pyupgrade
-        args: ["--py37-plus"]
+        args: [--py38-plus]
         exclude: ^conda/exports.py
-  - repo: https://github.com/akaihola/darker
-    rev: 1.7.1
+  - repo: https://github.com/pycqa/isort
+    rev: 5.12.0
+    hooks:
+      # auto sort Python imports
+      - id: isort
+  - repo: https://github.com/psf/black
+    rev: 23.3.0
     hooks:
-      - id: darker
-        additional_dependencies: [black==22.10.0]
-  - repo: https://github.com/asottile/blacken-docs
+      # auto format Python code
+      - id: black
+  - repo: https://github.com/adamchainz/blacken-docs
     rev: 1.13.0
     hooks:
+      # auto format Python code within docstrings
      - id: blacken-docs
        additional_dependencies: [black]
   - repo: https://github.com/PyCQA/flake8
     rev: 6.0.0
     hooks:
+      # lint Python code
       - id: flake8
-  - repo: https://github.com/Lucas-C/pre-commit-hooks
-    rev: v1.4.2
-    hooks:
-      - id: insert-license
-        files: \.py$
-        args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol]
-        exclude: ^conda/auxlib/
diff --git a/conda/__init__.py b/conda/__init__.py
index 8e4c4e5ead7..bf97eb04ae6 100644
--- a/conda/__init__.py
+++ b/conda/__init__.py
@@ -1,18 +1,27 @@
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
 """OS-agnostic, system-level binary package manager."""
-from json import JSONEncoder
 import os
-from os.path import abspath, dirname
 import sys
+from json import JSONEncoder
+from os.path import abspath, dirname

 from .__version__ import __version__
 from .deprecations import deprecated

-
 __all__ = (
-    "__name__", "__version__", "__author__", "__email__", "__license__", "__summary__", "__url__",
-    "CONDA_PACKAGE_ROOT", "CondaError", "CondaMultiError", "CondaExitZero", "conda_signal_handler",
+    "__name__",
+    "__version__",
+    "__author__",
+    "__email__",
+    "__license__",
+    "__summary__",
+    "__url__",
+    "CONDA_PACKAGE_ROOT",
+    "CondaError",
+    "CondaMultiError",
+    "CondaExitZero",
+    "conda_signal_handler",
     "__copyright__",
 )
@@ -40,6 +49,7 @@ def another_to_unicode(val):
     return val

+
class CondaError(Exception): return_code = 1 reportable = False # Exception may be reported to core maintainers @@ -57,14 +67,16 @@ def __str__(self): try: return str(self.message % self._kwargs) except Exception: - debug_message = "\n".join(( - "class: " + self.__class__.__name__, - "message:", - self.message, - "kwargs:", - str(self._kwargs), - "", - )) + debug_message = "\n".join( + ( + "class: " + self.__class__.__name__, + "message:", + self.message, + "kwargs:", + str(self._kwargs), + "", + ) + ) print(debug_message, file=sys.stderr) raise @@ -76,13 +88,12 @@ def dump_map(self): message=str(self), error=repr(self), caused_by=repr(self._caused_by), - **self._kwargs + **self._kwargs, ) return result class CondaMultiError(CondaError): - def __init__(self, errors): self.errors = errors super().__init__(None) @@ -97,18 +108,19 @@ def __repr__(self): # by using e.__repr__() instead of repr(e) # https://github.com/scrapy/cssselect/issues/34 errs.append(e.__repr__()) - res = '\n'.join(errs) + res = "\n".join(errs) return res def __str__(self): return "\n".join(str(e) for e in self.errors) + "\n" def dump_map(self): - return dict(exception_type=str(type(self)), - exception_name=self.__class__.__name__, - errors=tuple(error.dump_map() for error in self.errors), - error="Multiple Errors Encountered.", - ) + return dict( + exception_type=str(type(self)), + exception_name=self.__class__.__name__, + errors=tuple(error.dump_map() for error in self.errors), + error="Multiple Errors Encountered.", + ) def contains(self, exception_class): return any(isinstance(e, exception_class) for e in self.errors) @@ -130,6 +142,7 @@ def conda_signal_handler(signum, frame): p.send_signal(signum) from .exceptions import CondaSignalInterrupt + raise CondaSignalInterrupt(signum) diff --git a/conda/activate.py b/conda/activate.py index 836a3a0800b..0d7293f3db9 100644 --- a/conda/activate.py +++ b/conda/activate.py @@ -1,14 +1,22 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from __future__ import annotations -from errno import ENOENT import json import os -from os.path import abspath, basename, dirname, expanduser, expandvars, isdir, join, exists import re import sys +from errno import ENOENT +from os.path import ( + abspath, + basename, + dirname, + exists, + expanduser, + expandvars, + isdir, + join, +) from textwrap import dedent from typing import Iterable @@ -16,7 +24,11 @@ # conda.base.context is fair game, but nothing more. from . 
import CONDA_PACKAGE_ROOT, CONDA_SOURCE_ROOT from .auxlib.compat import Utf8NamedTemporaryFile -from .base.constants import PREFIX_STATE_FILE, PACKAGE_ENV_VARS_DIR, CONDA_ENV_VARS_UNSET_VAR +from .base.constants import ( + CONDA_ENV_VARS_UNSET_VAR, + PACKAGE_ENV_VARS_DIR, + PREFIX_STATE_FILE, +) from .base.context import ROOT_ENV_NAME, context, locate_prefix_by_name from .common.compat import FILESYSTEM_ENCODING, on_win from .common.path import paths_equal @@ -53,7 +65,9 @@ def path_conversion(self, paths: Iterable[str]): raise NotImplementedError() script_extension = None - tempfile_extension = None # None means write instructions to stdout rather than a temp file + tempfile_extension = ( + None # None means write instructions to stdout rather than a temp file + ) command_join: str unset_var_tmpl = None @@ -93,7 +107,9 @@ def get_export_unset_vars(self, export_metavars=True, **kwargs): if value is None: unset_vars.append(name.upper()) else: - export_vars[name.upper()] = self.path_conversion(value) if value else value + export_vars[name.upper()] = ( + self.path_conversion(value) if value else value + ) else: # unset all meta variables unset_vars.extend(context.conda_exe_vars_dict) @@ -141,7 +157,9 @@ def activate(self): builder_result = self.build_stack(self.env_name_or_prefix) else: builder_result = self.build_activate(self.env_name_or_prefix) - return self._finalize(self._yield_commands(builder_result), self.tempfile_extension) + return self._finalize( + self._yield_commands(builder_result), self.tempfile_extension + ) def deactivate(self): return self._finalize( @@ -158,7 +176,11 @@ def hook(self, auto_activate_base=None): builder.append(self._hook_preamble()) with open(self.hook_source_path) as fsrc: builder.append(fsrc.read()) - if auto_activate_base is None and context.auto_activate_base or auto_activate_base: + if ( + auto_activate_base is None + and context.auto_activate_base + or auto_activate_base + ): builder.append("conda activate base\n") postamble = self._hook_postamble() if postamble is not None: @@ -177,13 +199,15 @@ def commands(self): This method is generally only used by tab-completion. """ # Import locally to reduce impact on initialization time. + from .cli.conda_argparse import find_builtin_commands, generate_parser from .cli.find_commands import find_commands - from .cli.conda_argparse import generate_parser, find_builtin_commands # return value meant to be written to stdout # Hidden commands to provide metadata to shells. 
return "\n".join( - sorted(find_builtin_commands(generate_parser()) + tuple(find_commands(True))) + sorted( + find_builtin_commands(generate_parser()) + tuple(find_commands(True)) + ) ) def _hook_preamble(self): @@ -227,7 +251,13 @@ def raise_invalid_command_error(actual_command=None): "reactivate": GenericHelp("reactivate"), } raise help_classes[command] - elif command not in ("activate", "deactivate", "reactivate", "hook", "commands"): + elif command not in ( + "activate", + "deactivate", + "reactivate", + "hook", + "commands", + ): raise_invalid_command_error(actual_command=command) if command.endswith("activate") or command == "hook": @@ -252,7 +282,9 @@ def raise_invalid_command_error(actual_command=None): if stack_idx >= 0 and no_stack_idx >= 0: from .exceptions import ArgumentError - raise ArgumentError("cannot specify both --stack and --no-stack to " + command) + raise ArgumentError( + "cannot specify both --stack and --no-stack to " + command + ) if stack_idx >= 0: self.stack = True del remainder_args[stack_idx] @@ -339,9 +371,14 @@ def _build_activate_stack(self, env_name_or_prefix, stack): # get clobbered environment variables clobber_vars = set(env_vars.keys()).intersection(os.environ.keys()) - clobber_vars = set(filter(lambda var: env_vars[var] != os.environ[var], clobber_vars)) + clobber_vars = set( + filter(lambda var: env_vars[var] != os.environ[var], clobber_vars) + ) if clobber_vars: - print("WARNING: overwriting environment variables set in the machine", file=sys.stderr) + print( + "WARNING: overwriting environment variables set in the machine", + file=sys.stderr, + ) print(f"overwriting variable {clobber_vars}", file=sys.stderr) for name in clobber_vars: env_vars[f"__CONDA_SHLVL_{old_conda_shlvl}_{name}"] = os.environ.get(name) @@ -372,7 +409,9 @@ def _build_activate_stack(self, env_name_or_prefix, stack): deactivate_scripts = () else: export_vars, unset_vars = self.get_export_unset_vars( - path=self.pathsep_join(self._replace_prefix_in_path(old_conda_prefix, prefix)), + path=self.pathsep_join( + self._replace_prefix_in_path(old_conda_prefix, prefix) + ), conda_prefix=prefix, conda_shlvl=conda_shlvl, conda_default_env=conda_default_env, @@ -411,12 +450,16 @@ def build_deactivate(self): "activate_scripts": (), } deactivate_scripts = self._get_deactivate_scripts(old_conda_prefix) - old_conda_environment_env_vars = self._get_environment_env_vars(old_conda_prefix) + old_conda_environment_env_vars = self._get_environment_env_vars( + old_conda_prefix + ) new_conda_shlvl = old_conda_shlvl - 1 set_vars = {} if old_conda_shlvl == 1: - new_path = self.pathsep_join(self._remove_prefix_from_path(old_conda_prefix)) + new_path = self.pathsep_join( + self._remove_prefix_from_path(old_conda_prefix) + ) # You might think that you can remove the CONDA_EXE vars with export_metavars=False # here so that "deactivate means deactivate" but you cannot since the conda shell # scripts still refer to them and they only set them once at the top. 
We could change @@ -447,7 +490,9 @@ def build_deactivate(self): unset_vars = ["CONDA_PREFIX_%d" % new_conda_shlvl] if old_prefix_stacked: - new_path = self.pathsep_join(self._remove_prefix_from_path(old_conda_prefix)) + new_path = self.pathsep_join( + self._remove_prefix_from_path(old_conda_prefix) + ) unset_vars.append("CONDA_STACKED_%d" % old_conda_shlvl) else: new_path = self.pathsep_join( @@ -497,8 +542,12 @@ def build_reactivate(self): "deactivate_scripts": (), "activate_scripts": (), } - conda_default_env = self.environ.get("CONDA_DEFAULT_ENV", self._default_env(conda_prefix)) - new_path = self.pathsep_join(self._replace_prefix_in_path(conda_prefix, conda_prefix)) + conda_default_env = self.environ.get( + "CONDA_DEFAULT_ENV", self._default_env(conda_prefix) + ) + new_path = self.pathsep_join( + self._replace_prefix_in_path(conda_prefix, conda_prefix) + ) set_vars = {} conda_prompt_modifier = self._prompt_modifier(conda_prefix, conda_default_env) if context.changeps1: @@ -508,7 +557,9 @@ def build_reactivate(self): env_vars_to_export = { "PATH": new_path, "CONDA_SHLVL": conda_shlvl, - "CONDA_PROMPT_MODIFIER": self._prompt_modifier(conda_prefix, conda_default_env), + "CONDA_PROMPT_MODIFIER": self._prompt_modifier( + conda_prefix, conda_default_env + ), } conda_environment_env_vars = self._get_environment_env_vars(conda_prefix) for k, v in conda_environment_env_vars.items(): @@ -540,7 +591,8 @@ def _get_starting_path_list(self): "C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\", } path = self.environ.get( - "PATH", clean_paths[sys.platform] if sys.platform in clean_paths else "/usr/bin" + "PATH", + clean_paths[sys.platform] if sys.platform in clean_paths else "/usr/bin", ) path_split = path.split(os.pathsep) return path_split @@ -615,7 +667,7 @@ def index_of_path(paths, test_path): ) if path_list[last_idx + 1] == library_bin_dir: last_idx += 1 - del path_list[first_idx:last_idx + 1] + del path_list[first_idx : last_idx + 1] else: first_idx = 0 @@ -642,7 +694,9 @@ def _prompt_modifier(self, prefix, conda_default_env): if i == old_shlvl: env_i = self._default_env(self.environ.get("CONDA_PREFIX", "")) else: - env_i = self._default_env(self.environ.get(f"CONDA_PREFIX_{i}", "").rstrip()) + env_i = self._default_env( + self.environ.get(f"CONDA_PREFIX_{i}", "").rstrip() + ) stacked_i = bool(self.environ.get(f"CONDA_STACKED_{i}", "").rstrip()) env_stack.append(env_i) if not stacked_i: @@ -655,7 +709,9 @@ def _prompt_modifier(self, prefix, conda_default_env): if deactivate: prompt_stack = prompt_stack[0:-1] env_stack = env_stack[0:-1] - stacked = bool(self.environ.get(f"CONDA_STACKED_{old_shlvl}", "").rstrip()) + stacked = bool( + self.environ.get(f"CONDA_STACKED_{old_shlvl}", "").rstrip() + ) if not stacked and env_stack: prompt_stack.append(env_stack[-1]) elif reactivate: @@ -682,18 +738,22 @@ def _get_activate_scripts(self, prefix): se_len = -len(_script_extension) try: paths = ( - entry.path for entry in os.scandir(join(prefix, "etc", "conda", "activate.d")) + entry.path + for entry in os.scandir(join(prefix, "etc", "conda", "activate.d")) ) except OSError: return () - return self.path_conversion(sorted(p for p in paths if p[se_len:] == _script_extension)) + return self.path_conversion( + sorted(p for p in paths if p[se_len:] == _script_extension) + ) def _get_deactivate_scripts(self, prefix): _script_extension = self.script_extension se_len = -len(_script_extension) try: paths = ( - entry.path for entry in os.scandir(join(prefix, "etc", "conda", "deactivate.d")) + entry.path + for entry in 
os.scandir(join(prefix, "etc", "conda", "deactivate.d"))
+            )
         except OSError:
             return ()
@@ -708,7 +768,9 @@ def _get_environment_env_vars(self, prefix):
         # First get env vars from packages
         if exists(pkg_env_var_dir):
-            for pkg_env_var_path in sorted(entry.path for entry in os.scandir(pkg_env_var_dir)):
+            for pkg_env_var_path in sorted(
+                entry.path for entry in os.scandir(pkg_env_var_dir)
+            ):
                 with open(pkg_env_var_path) as f:
                     env_vars.update(json.loads(f.read()))
@@ -717,7 +779,9 @@ def _get_environment_env_vars(self, prefix):
             with open(env_vars_file) as f:
                 prefix_state = json.loads(f.read())
                 prefix_state_env_vars = prefix_state.get("env_vars", {})
-                dup_vars = [ev for ev in env_vars.keys() if ev in prefix_state_env_vars.keys()]
+                dup_vars = [
+                    ev for ev in env_vars.keys() if ev in prefix_state_env_vars.keys()
+                ]
                 for dup in dup_vars:
                     print(
                         "WARNING: duplicate env vars detected. Vars from the environment "
@@ -756,7 +820,8 @@ def native_path_to_unix(paths):  # pragma: unix no cover
         return path_identity(paths)
     if paths is None:
         return None
-    from subprocess import CalledProcessError, PIPE, Popen
+    from subprocess import PIPE, CalledProcessError, Popen
+
     from conda.auxlib.compat import shlex_split_unicode

     # It is very easy to end up with a bash in one place and a cygpath in another due to e.g.
@@ -781,15 +846,23 @@ def native_path_to_unix(paths):  # pragma: unix no cover
         except OSError as e:
             if e.errno != ENOENT:
                 raise
+
         # This code path should (hopefully) never be hit by real conda installs. It's here
         # as a backup for tests run under cmd.exe with cygpath not available.
         def _translation(found_path):  # NOQA
-            found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/")
+            found = (
+                found_path.group(1)
+                .replace("\\", "/")
+                .replace(":", "")
+                .replace("//", "/")
+            )
             return "/" + found.rstrip("/")

         joined = ensure_fs_path_encoding(joined)
         stdout = (
-            re.sub(r"([a-zA-Z]:[\/\\\\]+(?:[^:*?\"<>|;]+[\/\\\\]*)*)", _translation, joined)
+            re.sub(
+                r"([a-zA-Z]:[\/\\\\]+(?:[^:*?\"<>|;]+[\/\\\\]*)*)", _translation, joined
+            )
             .replace(";/", ":/")
             .rstrip(";")
         )
@@ -822,7 +895,9 @@ def __init__(self, arguments=None):
         self.sep = "/"
         self.path_conversion = native_path_to_unix
         self.script_extension = ".sh"
-        self.tempfile_extension = None  # write instructions to stdout rather than a temp file
+        self.tempfile_extension = (
+            None  # write instructions to stdout rather than a temp file
+        )
         self.command_join = "\n"

         self.unset_var_tmpl = "unset %s"
@@ -830,7 +905,9 @@ def __init__(self, arguments=None):
         self.set_var_tmpl = "%s='%s'"
         self.run_script_tmpl = '. 
"%s"' - self.hook_source_path = join(CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.sh") + self.hook_source_path = join( + CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.sh" + ) super().__init__(arguments) @@ -863,7 +940,8 @@ def _hook_preamble(self): else: if key in ("PYTHONPATH", "CONDA_EXE"): result += ( - join(self.export_var_tmpl % (key, self.path_conversion(value))) + "\n" + join(self.export_var_tmpl % (key, self.path_conversion(value))) + + "\n" ) else: result += join(self.export_var_tmpl % (key, value)) + "\n" @@ -876,7 +954,9 @@ def __init__(self, arguments=None): self.sep = "/" self.path_conversion = native_path_to_unix self.script_extension = ".csh" - self.tempfile_extension = None # write instructions to stdout rather than a temp file + self.tempfile_extension = ( + None # write instructions to stdout rather than a temp file + ) self.command_join = ";\n" self.unset_var_tmpl = "unsetenv %s" @@ -884,7 +964,9 @@ def __init__(self, arguments=None): self.set_var_tmpl = "set %s='%s'" self.run_script_tmpl = 'source "%s"' - self.hook_source_path = join(CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.csh") + self.hook_source_path = join( + CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.csh" + ) super().__init__(arguments) @@ -940,7 +1022,9 @@ def __init__(self, arguments=None): self.unset_var_tmpl = "del $%s" self.export_var_tmpl = "$%s = '%s'" - self.set_var_tmpl = "$%s = '%s'" # TODO: determine if different than export_var_tmpl + self.set_var_tmpl = ( + "$%s = '%s'" # TODO: determine if different than export_var_tmpl + ) # 'scripts' really refer to de/activation scripts, not scripts in the language per se # xonsh can piggy-back activation scripts from other languages depending on the platform @@ -972,7 +1056,9 @@ def __init__(self, arguments=None): self.unset_var_tmpl = "@SET %s=" self.export_var_tmpl = '@SET "%s=%s"' - self.set_var_tmpl = '@SET "%s=%s"' # TODO: determine if different than export_var_tmpl + self.set_var_tmpl = ( + '@SET "%s=%s"' # TODO: determine if different than export_var_tmpl + ) self.run_script_tmpl = '@CALL "%s"' self.hook_source_path = None @@ -992,7 +1078,9 @@ def __init__(self, arguments=None): self.sep = "/" self.path_conversion = native_path_to_unix self.script_extension = ".fish" - self.tempfile_extension = None # write instructions to stdout rather than a temp file + self.tempfile_extension = ( + None # write instructions to stdout rather than a temp file + ) self.command_join = ";\n" self.unset_var_tmpl = "set -e %s" @@ -1033,7 +1121,9 @@ def __init__(self, arguments=None): self.sep = "\\" if on_win else "/" self.path_conversion = path_identity self.script_extension = ".ps1" - self.tempfile_extension = None # write instructions to stdout rather than a temp file + self.tempfile_extension = ( + None # write instructions to stdout rather than a temp file + ) self.command_join = "\n" self.unset_var_tmpl = '$Env:%s = ""' @@ -1041,7 +1131,9 @@ def __init__(self, arguments=None): self.set_var_tmpl = '$Env:%s = "%s"' self.run_script_tmpl = '. 
"%s"' - self.hook_source_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda-hook.ps1") + self.hook_source_path = join( + CONDA_PACKAGE_ROOT, "shell", "condabin", "conda-hook.ps1" + ) super().__init__(arguments) @@ -1079,7 +1171,9 @@ class JSONFormatMixin(_Activator): def __init__(self, arguments=None): self.pathsep_join = list - self.tempfile_extension = None # write instructions to stdout rather than a temp file + self.tempfile_extension = ( + None # write instructions to stdout rather than a temp file + ) self.command_join = list super().__init__(arguments) diff --git a/conda/api.py b/conda/api.py index 2954c3da3ee..08f35776944 100644 --- a/conda/api.py +++ b/conda/api.py @@ -1,7 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from .base.constants import DepsModifier as _DepsModifier, UpdateModifier as _UpdateModifier +from .base.constants import DepsModifier as _DepsModifier +from .base.constants import UpdateModifier as _UpdateModifier from .base.context import context from .common.constants import NULL from .core.package_cache_data import PackageCacheData as _PackageCacheData @@ -29,7 +29,9 @@ class Solver: """ - def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=()): + def __init__( + self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=() + ): """ **Beta** @@ -48,10 +50,18 @@ def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remov """ solver_backend = context.plugin_manager.get_cached_solver_backend() - self._internal = solver_backend(prefix, channels, subdirs, specs_to_add, specs_to_remove) - - def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL): + self._internal = solver_backend( + prefix, channels, subdirs, specs_to_add, specs_to_remove + ) + + def solve_final_state( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + ): """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -84,11 +94,19 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL the solved state of the environment. """ - return self._internal.solve_final_state(update_modifier, deps_modifier, prune, - ignore_pinned, force_remove) + return self._internal.solve_final_state( + update_modifier, deps_modifier, prune, ignore_pinned, force_remove + ) - def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=False): + def solve_for_diff( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=False, + ): """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -118,11 +136,24 @@ def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, dependency order from roots to leaves. 
""" - return self._internal.solve_for_diff(update_modifier, deps_modifier, prune, ignore_pinned, - force_remove, force_reinstall) - - def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=False): + return self._internal.solve_for_diff( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + force_reinstall, + ) + + def solve_for_transaction( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=False, + ): """ **Beta** While in beta, expect both major and minor changes across minor releases. @@ -145,8 +176,14 @@ def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune= UnlinkLinkTransaction: """ - return self._internal.solve_for_transaction(update_modifier, deps_modifier, prune, - ignore_pinned, force_remove, force_reinstall) + return self._internal.solve_for_transaction( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + force_reinstall, + ) class SubdirData: @@ -210,7 +247,9 @@ def query_all(package_ref_or_match_spec, channels=None, subdirs=None): Tuple[PackageRecord] """ - return tuple(_SubdirData.query_all(package_ref_or_match_spec, channels, subdirs)) + return tuple( + _SubdirData.query_all(package_ref_or_match_spec, channels, subdirs) + ) def iter_records(self): """ diff --git a/conda/base/constants.py b/conda/base/constants.py index d0d875bb710..14822fdd8d6 100644 --- a/conda/base/constants.py +++ b/conda/base/constants.py @@ -8,61 +8,63 @@ Another important source of "static" configuration is conda/models/enums.py. """ +import struct from enum import Enum, EnumMeta from os.path import join -import struct from ..common.compat import on_win, six_with_metaclass -PREFIX_PLACEHOLDER = ('/opt/anaconda1anaconda2' - # this is intentionally split into parts, such that running - # this program on itself will leave it unchanged - 'anaconda3') +PREFIX_PLACEHOLDER = ( + "/opt/anaconda1anaconda2" + # this is intentionally split into parts, such that running + # this program on itself will leave it unchanged + "anaconda3" +) machine_bits = 8 * struct.calcsize("P") -APP_NAME = 'conda' +APP_NAME = "conda" if on_win: # pragma: no cover SEARCH_PATH = ( - 'C:/ProgramData/conda/.condarc', - 'C:/ProgramData/conda/condarc', - 'C:/ProgramData/conda/condarc.d', + "C:/ProgramData/conda/.condarc", + "C:/ProgramData/conda/condarc", + "C:/ProgramData/conda/condarc.d", ) else: SEARCH_PATH = ( - '/etc/conda/.condarc', - '/etc/conda/condarc', - '/etc/conda/condarc.d/', - '/var/lib/conda/.condarc', - '/var/lib/conda/condarc', - '/var/lib/conda/condarc.d/', + "/etc/conda/.condarc", + "/etc/conda/condarc", + "/etc/conda/condarc.d/", + "/var/lib/conda/.condarc", + "/var/lib/conda/condarc", + "/var/lib/conda/condarc.d/", ) SEARCH_PATH += ( - '$CONDA_ROOT/.condarc', - '$CONDA_ROOT/condarc', - '$CONDA_ROOT/condarc.d/', - '$XDG_CONFIG_HOME/conda/.condarc', - '$XDG_CONFIG_HOME/conda/condarc', - '$XDG_CONFIG_HOME/conda/condarc.d/', - '~/.config/conda/.condarc', - '~/.config/conda/condarc', - '~/.config/conda/condarc.d/', - '~/.conda/.condarc', - '~/.conda/condarc', - '~/.conda/condarc.d/', - '~/.condarc', - '$CONDA_PREFIX/.condarc', - '$CONDA_PREFIX/condarc', - '$CONDA_PREFIX/condarc.d/', - '$CONDARC', + "$CONDA_ROOT/.condarc", + "$CONDA_ROOT/condarc", + "$CONDA_ROOT/condarc.d/", + "$XDG_CONFIG_HOME/conda/.condarc", + "$XDG_CONFIG_HOME/conda/condarc", + 
"$XDG_CONFIG_HOME/conda/condarc.d/", + "~/.config/conda/.condarc", + "~/.config/conda/condarc", + "~/.config/conda/condarc.d/", + "~/.conda/.condarc", + "~/.conda/condarc", + "~/.conda/condarc.d/", + "~/.condarc", + "$CONDA_PREFIX/.condarc", + "$CONDA_PREFIX/condarc", + "$CONDA_PREFIX/condarc.d/", + "$CONDARC", ) -DEFAULT_CHANNEL_ALIAS = 'https://conda.anaconda.org' -CONDA_HOMEPAGE_URL = 'https://conda.io' -ERROR_UPLOAD_URL = 'https://conda.io/conda-post/unexpected-error' -DEFAULTS_CHANNEL_NAME = 'defaults' +DEFAULT_CHANNEL_ALIAS = "https://conda.anaconda.org" +CONDA_HOMEPAGE_URL = "https://conda.io" +ERROR_UPLOAD_URL = "https://conda.io/conda-post/unexpected-error" +DEFAULTS_CHANNEL_NAME = "defaults" KNOWN_SUBDIRS = PLATFORM_DIRECTORIES = ( "noarch", @@ -83,61 +85,61 @@ "zos-z", ) -RECOGNIZED_URL_SCHEMES = ('http', 'https', 'ftp', 's3', 'file') +RECOGNIZED_URL_SCHEMES = ("http", "https", "ftp", "s3", "file") DEFAULT_CHANNELS_UNIX = ( - 'https://repo.anaconda.com/pkgs/main', - 'https://repo.anaconda.com/pkgs/r', + "https://repo.anaconda.com/pkgs/main", + "https://repo.anaconda.com/pkgs/r", ) DEFAULT_CHANNELS_WIN = ( - 'https://repo.anaconda.com/pkgs/main', - 'https://repo.anaconda.com/pkgs/r', - 'https://repo.anaconda.com/pkgs/msys2', + "https://repo.anaconda.com/pkgs/main", + "https://repo.anaconda.com/pkgs/r", + "https://repo.anaconda.com/pkgs/msys2", ) DEFAULT_CUSTOM_CHANNELS = { - 'pkgs/pro': 'https://repo.anaconda.com', + "pkgs/pro": "https://repo.anaconda.com", } DEFAULT_CHANNELS = DEFAULT_CHANNELS_WIN if on_win else DEFAULT_CHANNELS_UNIX -ROOT_ENV_NAME = 'base' +ROOT_ENV_NAME = "base" ROOT_NO_RM = ( - 'python', - 'pycosat', - 'ruamel.yaml', - 'conda', - 'openssl', - 'requests', + "python", + "pycosat", + "ruamel.yaml", + "conda", + "openssl", + "requests", ) DEFAULT_AGGRESSIVE_UPDATE_PACKAGES = ( - 'ca-certificates', - 'certifi', - 'openssl', + "ca-certificates", + "certifi", + "openssl", ) if on_win: # pragma: no cover COMPATIBLE_SHELLS = ( - 'bash', - 'cmd.exe', - 'fish', - 'tcsh', - 'xonsh', - 'zsh', - 'powershell', + "bash", + "cmd.exe", + "fish", + "tcsh", + "xonsh", + "zsh", + "powershell", ) else: COMPATIBLE_SHELLS = ( - 'bash', - 'fish', - 'tcsh', - 'xonsh', - 'zsh', - 'powershell', + "bash", + "fish", + "tcsh", + "xonsh", + "zsh", + "powershell", ) @@ -151,7 +153,7 @@ CONDA_PACKAGE_EXTENSION_V1, ) CONDA_TARBALL_EXTENSION = CONDA_PACKAGE_EXTENSION_V1 # legacy support for conda-build; remove this line # NOQA -CONDA_TEMP_EXTENSION = '.c~' +CONDA_TEMP_EXTENSION = ".c~" CONDA_TEMP_EXTENSIONS = (CONDA_TEMP_EXTENSION, ".trash") CONDA_LOGS_DIR = ".logs" @@ -175,18 +177,18 @@ class SafetyChecks(Enum): - disabled = 'disabled' - warn = 'warn' - enabled = 'enabled' + disabled = "disabled" + warn = "warn" + enabled = "enabled" def __str__(self): return self.value class PathConflict(Enum): - clobber = 'clobber' - warn = 'warn' - prevent = 'prevent' + clobber = "clobber" + warn = "warn" + prevent = "prevent" def __str__(self): return self.value @@ -194,20 +196,23 @@ def __str__(self): class DepsModifier(Enum): """Flags to enable alternate handling of dependencies.""" - NOT_SET = 'not_set' # default - NO_DEPS = 'no_deps' - ONLY_DEPS = 'only_deps' + + NOT_SET = "not_set" # default + NO_DEPS = "no_deps" + ONLY_DEPS = "only_deps" def __str__(self): return self.value class UpdateModifier(Enum): - SPECS_SATISFIED_SKIP_SOLVE = 'specs_satisfied_skip_solve' - FREEZE_INSTALLED = 'freeze_installed' # freeze is a better name for --no-update-deps - UPDATE_DEPS = 'update_deps' - UPDATE_SPECS = 
'update_specs' # default - UPDATE_ALL = 'update_all' + SPECS_SATISFIED_SKIP_SOLVE = "specs_satisfied_skip_solve" + FREEZE_INSTALLED = ( + "freeze_installed" # freeze is a better name for --no-update-deps + ) + UPDATE_DEPS = "update_deps" + UPDATE_SPECS = "update_specs" # default + UPDATE_ALL = "update_all" # TODO: add REINSTALL_ALL, see https://github.com/conda/conda/issues/6247 and https://github.com/conda/conda/issues/3149 # NOQA def __str__(self): @@ -215,16 +220,16 @@ def __str__(self): class ChannelPriorityMeta(EnumMeta): - def __call__(cls, value, *args, **kwargs): try: return super().__call__(value, *args, **kwargs) except ValueError: if isinstance(value, str): from ..auxlib.type_coercion import typify + value = typify(value) if value is True: - value = 'flexible' + value = "flexible" elif value is False: value = cls.DISABLED return super().__call__(value, *args, **kwargs) @@ -240,16 +245,16 @@ def __str__(self): class ChannelPriority(six_with_metaclass(ChannelPriorityMeta, ValueEnum)): __name__ = "ChannelPriority" - STRICT = 'strict' + STRICT = "strict" # STRICT_OR_FLEXIBLE = 'strict_or_flexible' # TODO: consider implementing if needed - FLEXIBLE = 'flexible' - DISABLED = 'disabled' + FLEXIBLE = "flexible" + DISABLED = "disabled" class SatSolverChoice(ValueEnum): - PYCOSAT = 'pycosat' - PYCRYPTOSAT = 'pycryptosat' - PYSAT = 'pysat' + PYCOSAT = "pycosat" + PYCRYPTOSAT = "pycryptosat" + PYSAT = "pysat" #: The name of the default solver, currently "classic" @@ -263,11 +268,11 @@ class NoticeLevel(ValueEnum): # Magic files for permissions determination -PACKAGE_CACHE_MAGIC_FILE = 'urls.txt' -PREFIX_MAGIC_FILE = join('conda-meta', 'history') +PACKAGE_CACHE_MAGIC_FILE = "urls.txt" +PREFIX_MAGIC_FILE = join("conda-meta", "history") -PREFIX_STATE_FILE = join('conda-meta', 'state') -PACKAGE_ENV_VARS_DIR = join('etc', 'conda', 'env_vars.d') +PREFIX_STATE_FILE = join("conda-meta", "state") +PACKAGE_ENV_VARS_DIR = join("etc", "conda", "env_vars.d") CONDA_ENV_VARS_UNSET_VAR = "***unset***" diff --git a/conda/base/context.py b/conda/base/context.py index 4f5f46392a7..356a11b012c 100644 --- a/conda/base/context.py +++ b/conda/base/context.py @@ -1,60 +1,66 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import os +import platform +import struct +import sys +from contextlib import contextmanager from errno import ENOENT from functools import lru_cache from itertools import chain from logging import getLogger +from os.path import abspath, expanduser, isdir, isfile, join +from os.path import split as path_split from typing import Optional -import os -from os.path import abspath, expanduser, isdir, isfile, join, split as path_split -import platform -import sys -import struct -from contextlib import contextmanager try: from boltons.setutils import IndexedSet except ImportError: # pragma: no cover from .._vendor.boltons.setutils import IndexedSet +from .. import CONDA_SOURCE_ROOT +from .. 
import __version__ as CONDA_VERSION +from .._vendor.appdirs import user_data_dir +from .._vendor.frozendict import frozendict +from ..auxlib.decorators import memoizedproperty +from ..auxlib.ish import dals +from ..common._os.linux import linux_get_libc_version +from ..common.compat import NoneType, on_win +from ..common.configuration import ( + Configuration, + ConfigurationLoadError, + MapParameter, + ParameterLoader, + PrimitiveParameter, + SequenceParameter, + ValidationError, +) +from ..common.iterators import unique +from ..common.path import expand, paths_equal +from ..common.url import has_scheme, path_to_url, split_scheme_auth_token +from ..deprecations import deprecated from .constants import ( APP_NAME, - ChannelPriority, - DEFAULTS_CHANNEL_NAME, - REPODATA_FN, DEFAULT_AGGRESSIVE_UPDATE_PACKAGES, - DEFAULT_CHANNELS, DEFAULT_CHANNEL_ALIAS, + DEFAULT_CHANNELS, DEFAULT_CUSTOM_CHANNELS, DEFAULT_SOLVER, - DepsModifier, + DEFAULTS_CHANNEL_NAME, ERROR_UPLOAD_URL, KNOWN_SUBDIRS, PREFIX_MAGIC_FILE, - PathConflict, + PREFIX_NAME_DISALLOWED_CHARS, + REPODATA_FN, ROOT_ENV_NAME, SEARCH_PATH, + ChannelPriority, + DepsModifier, + PathConflict, SafetyChecks, SatSolverChoice, UpdateModifier, - PREFIX_NAME_DISALLOWED_CHARS, ) -from .. import __version__ as CONDA_VERSION -from ..deprecations import deprecated -from .._vendor.appdirs import user_data_dir -from ..auxlib.decorators import memoizedproperty -from ..auxlib.ish import dals -from .._vendor.frozendict import frozendict -from ..common.compat import NoneType, on_win -from ..common.configuration import (Configuration, ConfigurationLoadError, MapParameter, - ParameterLoader, PrimitiveParameter, SequenceParameter, - ValidationError) -from ..common.iterators import unique -from ..common._os.linux import linux_get_libc_version -from ..common.path import expand, paths_equal -from ..common.url import has_scheme, path_to_url, split_scheme_auth_token - -from .. 
import CONDA_SOURCE_ROOT try: os.getcwd() @@ -69,41 +75,41 @@ log = getLogger(__name__) _platform_map = { - 'linux2': 'linux', - 'linux': 'linux', - 'darwin': 'osx', - 'win32': 'win', - 'zos': 'zos', + "linux2": "linux", + "linux": "linux", + "darwin": "osx", + "win32": "win", + "zos": "zos", } non_x86_machines = { - 'armv6l', - 'armv7l', - 'aarch64', - 'arm64', - 'ppc64', - 'ppc64le', - 'riscv64', - 's390x', + "armv6l", + "armv7l", + "aarch64", + "arm64", + "ppc64", + "ppc64le", + "riscv64", + "s390x", } _arch_names = { - 32: 'x86', - 64: 'x86_64', + 32: "x86", + 64: "x86_64", } -user_rc_path = abspath(expanduser('~/.condarc')) -sys_rc_path = join(sys.prefix, '.condarc') +user_rc_path = abspath(expanduser("~/.condarc")) +sys_rc_path = join(sys.prefix, ".condarc") def mockable_context_envs_dirs(root_writable, root_prefix, _envs_dirs): if root_writable: fixed_dirs = [ - join(root_prefix, 'envs'), - join('~', '.conda', 'envs'), + join(root_prefix, "envs"), + join("~", ".conda", "envs"), ] else: fixed_dirs = [ - join('~', '.conda', 'envs'), - join(root_prefix, 'envs'), + join("~", ".conda", "envs"), + join(root_prefix, "envs"), ] if on_win: fixed_dirs.append(join(user_data_dir(APP_NAME, APP_NAME), "envs")) @@ -118,12 +124,12 @@ def channel_alias_validation(value): def default_python_default(): ver = sys.version_info - return '%d.%d' % (ver.major, ver.minor) + return "%d.%d" % (ver.major, ver.minor) def default_python_validation(value): if value: - if len(value) >= 3 and value[1] == '.': + if len(value) >= 3 and value[1] == ".": try: value = float(value) if 2.0 <= value < 4.0: @@ -140,20 +146,23 @@ def default_python_validation(value): def ssl_verify_validation(value): if isinstance(value, str): if not isfile(value) and not isdir(value): - return ("ssl_verify value '%s' must be a boolean, a path to a " - "certificate bundle file, or a path to a directory containing " - "certificates of trusted CAs." % value) + return ( + "ssl_verify value '%s' must be a boolean, a path to a " + "certificate bundle file, or a path to a directory containing " + "certificates of trusted CAs." 
% value + ) return True class Context(Configuration): - add_pip_as_python_dependency = ParameterLoader(PrimitiveParameter(True)) allow_conda_downgrades = ParameterLoader(PrimitiveParameter(False)) # allow cyclical dependencies, or raise allow_cycles = ParameterLoader(PrimitiveParameter(True)) allow_softlinks = ParameterLoader(PrimitiveParameter(False)) - auto_update_conda = ParameterLoader(PrimitiveParameter(True), aliases=('self_update',)) + auto_update_conda = ParameterLoader( + PrimitiveParameter(True), aliases=("self_update",) + ) auto_activate_base = ParameterLoader(PrimitiveParameter(True)) auto_stack = ParameterLoader(PrimitiveParameter(0)) notify_outdated_conda = ParameterLoader(PrimitiveParameter(True)) @@ -161,11 +170,15 @@ class Context(Configuration): changeps1 = ParameterLoader(PrimitiveParameter(True)) env_prompt = ParameterLoader(PrimitiveParameter("({default_env}) ")) create_default_packages = ParameterLoader( - SequenceParameter(PrimitiveParameter("", element_type=str))) + SequenceParameter(PrimitiveParameter("", element_type=str)) + ) default_python = ParameterLoader( - PrimitiveParameter(default_python_default(), - element_type=(str, NoneType), - validation=default_python_validation)) + PrimitiveParameter( + default_python_default(), + element_type=(str, NoneType), + validation=default_python_validation, + ) + ) download_only = ParameterLoader(PrimitiveParameter(False)) enable_private_envs = ParameterLoader(PrimitiveParameter(False)) force_32bit = ParameterLoader(PrimitiveParameter(False)) @@ -174,11 +187,13 @@ class Context(Configuration): pip_interop_enabled = ParameterLoader(PrimitiveParameter(False)) # multithreading in various places - _default_threads = ParameterLoader(PrimitiveParameter(0, element_type=int), - aliases=('default_threads',)) + _default_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("default_threads",) + ) # download repodata - _repodata_threads = ParameterLoader(PrimitiveParameter(0, element_type=int), - aliases=('repodata_threads',)) + _repodata_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("repodata_threads",) + ) # download packages; determined experimentally _fetch_threads = ParameterLoader( PrimitiveParameter(5, element_type=int), aliases=("fetch_threads",) @@ -187,134 +202,173 @@ class Context(Configuration): PrimitiveParameter(0, element_type=int), aliases=("verify_threads",) ) # this one actually defaults to 1 - that is handled in the property below - _execute_threads = ParameterLoader(PrimitiveParameter(0, element_type=int), - aliases=('execute_threads',)) + _execute_threads = ParameterLoader( + PrimitiveParameter(0, element_type=int), aliases=("execute_threads",) + ) # Safety & Security _aggressive_update_packages = ParameterLoader( SequenceParameter( - PrimitiveParameter("", element_type=str), - DEFAULT_AGGRESSIVE_UPDATE_PACKAGES), - aliases=('aggressive_update_packages',)) + PrimitiveParameter("", element_type=str), DEFAULT_AGGRESSIVE_UPDATE_PACKAGES + ), + aliases=("aggressive_update_packages",), + ) safety_checks = ParameterLoader(PrimitiveParameter(SafetyChecks.warn)) extra_safety_checks = ParameterLoader(PrimitiveParameter(False)) _signing_metadata_url_base = ParameterLoader( PrimitiveParameter(None, element_type=(str, NoneType)), - aliases=('signing_metadata_url_base',)) + aliases=("signing_metadata_url_base",), + ) path_conflict = ParameterLoader(PrimitiveParameter(PathConflict.clobber)) - pinned_packages = ParameterLoader(SequenceParameter( - PrimitiveParameter("", 
element_type=str), - string_delimiter='&')) # TODO: consider a different string delimiter # NOQA + pinned_packages = ParameterLoader( + SequenceParameter( + PrimitiveParameter("", element_type=str), string_delimiter="&" + ) + ) # TODO: consider a different string delimiter # NOQA disallowed_packages = ParameterLoader( SequenceParameter( - PrimitiveParameter("", element_type=str), string_delimiter='&'), - aliases=('disallow',)) + PrimitiveParameter("", element_type=str), string_delimiter="&" + ), + aliases=("disallow",), + ) rollback_enabled = ParameterLoader(PrimitiveParameter(True)) track_features = ParameterLoader( - SequenceParameter(PrimitiveParameter("", element_type=str))) + SequenceParameter(PrimitiveParameter("", element_type=str)) + ) use_index_cache = ParameterLoader(PrimitiveParameter(False)) separate_format_cache = ParameterLoader(PrimitiveParameter(False)) - _root_prefix = ParameterLoader(PrimitiveParameter(""), aliases=('root_dir', 'root_prefix')) + _root_prefix = ParameterLoader( + PrimitiveParameter(""), aliases=("root_dir", "root_prefix") + ) _envs_dirs = ParameterLoader( - SequenceParameter(PrimitiveParameter("", element_type=str), - string_delimiter=os.pathsep), - aliases=('envs_dirs', 'envs_path'), - expandvars=True) - _pkgs_dirs = ParameterLoader(SequenceParameter(PrimitiveParameter("", str)), - aliases=('pkgs_dirs',), - expandvars=True) - _subdir = ParameterLoader(PrimitiveParameter(''), aliases=('subdir',)) + SequenceParameter( + PrimitiveParameter("", element_type=str), string_delimiter=os.pathsep + ), + aliases=("envs_dirs", "envs_path"), + expandvars=True, + ) + _pkgs_dirs = ParameterLoader( + SequenceParameter(PrimitiveParameter("", str)), + aliases=("pkgs_dirs",), + expandvars=True, + ) + _subdir = ParameterLoader(PrimitiveParameter(""), aliases=("subdir",)) _subdirs = ParameterLoader( - SequenceParameter(PrimitiveParameter("", str)), aliases=('subdirs',)) + SequenceParameter(PrimitiveParameter("", str)), aliases=("subdirs",) + ) - local_repodata_ttl = ParameterLoader(PrimitiveParameter(1, element_type=(bool, int))) + local_repodata_ttl = ParameterLoader( + PrimitiveParameter(1, element_type=(bool, int)) + ) # number of seconds to cache repodata locally # True/1: respect Cache-Control max-age header # False/0: always fetch remote repodata (HTTP 304 responses respected) # remote connection details ssl_verify = ParameterLoader( - PrimitiveParameter(True, - element_type=(str, bool), - validation=ssl_verify_validation), - aliases=('verify_ssl',), - expandvars=True) + PrimitiveParameter( + True, element_type=(str, bool), validation=ssl_verify_validation + ), + aliases=("verify_ssl",), + expandvars=True, + ) client_ssl_cert = ParameterLoader( PrimitiveParameter(None, element_type=(str, NoneType)), - aliases=('client_cert',), - expandvars=True) + aliases=("client_cert",), + expandvars=True, + ) client_ssl_cert_key = ParameterLoader( PrimitiveParameter(None, element_type=(str, NoneType)), - aliases=('client_cert_key',), - expandvars=True) + aliases=("client_cert_key",), + expandvars=True, + ) proxy_servers = ParameterLoader( - MapParameter(PrimitiveParameter(None, (str, NoneType))), - expandvars=True) + MapParameter(PrimitiveParameter(None, (str, NoneType))), expandvars=True + ) remote_connect_timeout_secs = ParameterLoader(PrimitiveParameter(9.15)) - remote_read_timeout_secs = ParameterLoader(PrimitiveParameter(60.)) + remote_read_timeout_secs = ParameterLoader(PrimitiveParameter(60.0)) remote_max_retries = ParameterLoader(PrimitiveParameter(3)) remote_backoff_factor = 
ParameterLoader(PrimitiveParameter(1)) - add_anaconda_token = ParameterLoader(PrimitiveParameter(True), aliases=('add_binstar_token',)) + add_anaconda_token = ParameterLoader( + PrimitiveParameter(True), aliases=("add_binstar_token",) + ) # ############################# # channels # ############################# allow_non_channel_urls = ParameterLoader(PrimitiveParameter(False)) _channel_alias = ParameterLoader( - PrimitiveParameter(DEFAULT_CHANNEL_ALIAS, - validation=channel_alias_validation), - aliases=('channel_alias',), - expandvars=True) + PrimitiveParameter(DEFAULT_CHANNEL_ALIAS, validation=channel_alias_validation), + aliases=("channel_alias",), + expandvars=True, + ) channel_priority = ParameterLoader(PrimitiveParameter(ChannelPriority.FLEXIBLE)) _channels = ParameterLoader( - SequenceParameter(PrimitiveParameter( - "", element_type=str), default=(DEFAULTS_CHANNEL_NAME,)), - aliases=('channels', 'channel',), - expandvars=True) # channel for args.channel + SequenceParameter( + PrimitiveParameter("", element_type=str), default=(DEFAULTS_CHANNEL_NAME,) + ), + aliases=( + "channels", + "channel", + ), + expandvars=True, + ) # channel for args.channel channel_settings = ParameterLoader( SequenceParameter(MapParameter(PrimitiveParameter("", element_type=str))) ) _custom_channels = ParameterLoader( MapParameter(PrimitiveParameter("", element_type=str), DEFAULT_CUSTOM_CHANNELS), - aliases=('custom_channels',), - expandvars=True) + aliases=("custom_channels",), + expandvars=True, + ) _custom_multichannels = ParameterLoader( MapParameter(SequenceParameter(PrimitiveParameter("", element_type=str))), - aliases=('custom_multichannels',), - expandvars=True) + aliases=("custom_multichannels",), + expandvars=True, + ) _default_channels = ParameterLoader( SequenceParameter(PrimitiveParameter("", element_type=str), DEFAULT_CHANNELS), - aliases=('default_channels',), - expandvars=True) + aliases=("default_channels",), + expandvars=True, + ) _migrated_channel_aliases = ParameterLoader( SequenceParameter(PrimitiveParameter("", element_type=str)), - aliases=('migrated_channel_aliases',)) + aliases=("migrated_channel_aliases",), + ) migrated_custom_channels = ParameterLoader( - MapParameter(PrimitiveParameter("", element_type=str)), - expandvars=True) # TODO: also take a list of strings + MapParameter(PrimitiveParameter("", element_type=str)), expandvars=True + ) # TODO: also take a list of strings override_channels_enabled = ParameterLoader(PrimitiveParameter(True)) - show_channel_urls = ParameterLoader(PrimitiveParameter(None, element_type=(bool, NoneType))) + show_channel_urls = ParameterLoader( + PrimitiveParameter(None, element_type=(bool, NoneType)) + ) use_local = ParameterLoader(PrimitiveParameter(False)) allowlist_channels = ParameterLoader( SequenceParameter(PrimitiveParameter("", element_type=str)), aliases=("whitelist_channels",), - expandvars=True) + expandvars=True, + ) restore_free_channel = ParameterLoader(PrimitiveParameter(False)) repodata_fns = ParameterLoader( SequenceParameter( PrimitiveParameter("", element_type=str), - ("current_repodata.json", REPODATA_FN))) - _use_only_tar_bz2 = ParameterLoader(PrimitiveParameter(None, element_type=(bool, NoneType)), - aliases=('use_only_tar_bz2',)) + ("current_repodata.json", REPODATA_FN), + ) + ) + _use_only_tar_bz2 = ParameterLoader( + PrimitiveParameter(None, element_type=(bool, NoneType)), + aliases=("use_only_tar_bz2",), + ) - always_softlink = ParameterLoader(PrimitiveParameter(False), aliases=('softlink',)) - always_copy = 
ParameterLoader(PrimitiveParameter(False), aliases=('copy',)) + always_softlink = ParameterLoader(PrimitiveParameter(False), aliases=("softlink",)) + always_copy = ParameterLoader(PrimitiveParameter(False), aliases=("copy",)) always_yes = ParameterLoader( - PrimitiveParameter(None, element_type=(bool, NoneType)), aliases=('yes',)) + PrimitiveParameter(None, element_type=(bool, NoneType)), aliases=("yes",) + ) debug = ParameterLoader(PrimitiveParameter(False)) dev = ParameterLoader(PrimitiveParameter(False)) dry_run = ParameterLoader(PrimitiveParameter(False)) @@ -324,11 +378,14 @@ class Context(Configuration): offline = ParameterLoader(PrimitiveParameter(False)) quiet = ParameterLoader(PrimitiveParameter(False)) ignore_pinned = ParameterLoader(PrimitiveParameter(False)) - report_errors = ParameterLoader(PrimitiveParameter(None, element_type=(bool, NoneType))) + report_errors = ParameterLoader( + PrimitiveParameter(None, element_type=(bool, NoneType)) + ) shortcuts = ParameterLoader(PrimitiveParameter(True)) number_channel_notices = ParameterLoader(PrimitiveParameter(5, element_type=int)) _verbosity = ParameterLoader( - PrimitiveParameter(0, element_type=int), aliases=('verbose', 'verbosity')) + PrimitiveParameter(0, element_type=int), aliases=("verbose", "verbosity") + ) experimental = ParameterLoader(SequenceParameter(PrimitiveParameter("", str))) # ###################################################### @@ -361,19 +418,22 @@ def experimental_solver(self): force_remove = ParameterLoader(PrimitiveParameter(False)) force_reinstall = ParameterLoader(PrimitiveParameter(False)) - target_prefix_override = ParameterLoader(PrimitiveParameter('')) + target_prefix_override = ParameterLoader(PrimitiveParameter("")) unsatisfiable_hints = ParameterLoader(PrimitiveParameter(True)) unsatisfiable_hints_check_depth = ParameterLoader(PrimitiveParameter(2)) # conda_build - bld_path = ParameterLoader(PrimitiveParameter('')) + bld_path = ParameterLoader(PrimitiveParameter("")) anaconda_upload = ParameterLoader( - PrimitiveParameter(None, element_type=(bool, NoneType)), aliases=('binstar_upload',)) - _croot = ParameterLoader(PrimitiveParameter(''), aliases=('croot',)) + PrimitiveParameter(None, element_type=(bool, NoneType)), + aliases=("binstar_upload",), + ) + _croot = ParameterLoader(PrimitiveParameter(""), aliases=("croot",)) _conda_build = ParameterLoader( MapParameter(PrimitiveParameter("", element_type=str)), - aliases=('conda-build', 'conda_build')) + aliases=("conda-build", "conda_build"), + ) def __init__(self, search_path=None, argparse_args=None): if search_path is None: @@ -382,31 +442,51 @@ def __init__(self, search_path=None, argparse_args=None): if argparse_args: # This block of code sets CONDA_PREFIX based on '-n' and '-p' flags, so that # configuration can be properly loaded from those locations - func_name = ('func' in argparse_args and argparse_args.func or '').rsplit('.', 1)[-1] - if func_name in ('create', 'install', 'update', 'remove', 'uninstall', 'upgrade'): - if 'prefix' in argparse_args and argparse_args.prefix: - os.environ['CONDA_PREFIX'] = argparse_args.prefix - elif 'name' in argparse_args and argparse_args.name: + func_name = ("func" in argparse_args and argparse_args.func or "").rsplit( + ".", 1 + )[-1] + if func_name in ( + "create", + "install", + "update", + "remove", + "uninstall", + "upgrade", + ): + if "prefix" in argparse_args and argparse_args.prefix: + os.environ["CONDA_PREFIX"] = argparse_args.prefix + elif "name" in argparse_args and argparse_args.name: # Currently, 
usage of the '-n' flag is inefficient, with all configuration # files being loaded/re-loaded at least two times. target_prefix = determine_target_prefix(context, argparse_args) if target_prefix != context.root_prefix: - os.environ['CONDA_PREFIX'] = determine_target_prefix(context, - argparse_args) + os.environ["CONDA_PREFIX"] = determine_target_prefix( + context, argparse_args + ) - super().__init__(search_path=search_path, app_name=APP_NAME, argparse_args=argparse_args) + super().__init__( + search_path=search_path, app_name=APP_NAME, argparse_args=argparse_args + ) def post_build_validation(self): errors = [] if self.client_ssl_cert_key and not self.client_ssl_cert: - error = ValidationError('client_ssl_cert', self.client_ssl_cert, "<>", - "'client_ssl_cert' is required when 'client_ssl_cert_key' " - "is defined") + error = ValidationError( + "client_ssl_cert", + self.client_ssl_cert, + "<>", + "'client_ssl_cert' is required when 'client_ssl_cert_key' " + "is defined", + ) errors.append(error) if self.always_copy and self.always_softlink: - error = ValidationError('always_copy', self.always_copy, "<>", - "'always_copy' and 'always_softlink' are mutually exclusive. " - "Only one can be set to 'True'.") + error = ValidationError( + "always_copy", + self.always_copy, + "<>", + "'always_copy' and 'always_softlink' are mutually exclusive. " + "Only one can be set to 'True'.", + ) errors.append(error) return errors @@ -417,20 +497,29 @@ def plugin_manager(self): and is located here to avoid problems with cyclical imports elsewhere in the code. """ from ..plugins.manager import get_plugin_manager + return get_plugin_manager() @property def conda_build_local_paths(self): # does file system reads to make sure paths actually exist - return tuple(unique(full_path for full_path in ( - expand(d) for d in ( - self._croot, - self.bld_path, - self.conda_build.get('root-dir'), - join(self.root_prefix, 'conda-bld'), - '~/conda-bld', - ) if d - ) if isdir(full_path))) + return tuple( + unique( + full_path + for full_path in ( + expand(d) + for d in ( + self._croot, + self.bld_path, + self.conda_build.get("root-dir"), + join(self.root_prefix, "conda-bld"), + "~/conda-bld", + ) + if d + ) + if isdir(full_path) + ) + ) @property def conda_build_local_urls(self): @@ -443,12 +532,12 @@ def croot(self): return abspath(expanduser(self._croot)) elif self.bld_path: return abspath(expanduser(self.bld_path)) - elif 'root-dir' in self.conda_build: - return abspath(expanduser(self.conda_build['root-dir'])) + elif "root-dir" in self.conda_build: + return abspath(expanduser(self.conda_build["root-dir"])) elif self.root_writable: - return join(self.root_prefix, 'conda-bld') + return join(self.root_prefix, "conda-bld") else: - return expand('~/conda-bld') + return expand("~/conda-bld") @property def local_build_root(self): @@ -482,7 +571,7 @@ def conda_private(self): @property def platform(self): - return _platform_map.get(sys.platform, 'unknown') + return _platform_map.get(sys.platform, "unknown") @property def default_threads(self) -> Optional[int]: @@ -526,7 +615,7 @@ def subdir(self): elif self.platform == "zos": return "zos-z" else: - return '%s-%d' % (self.platform, self.bits) + return "%s-%d" % (self.platform, self.bits) @property def subdirs(self): @@ -556,7 +645,7 @@ def root_writable(self): path = join(self.root_prefix, PREFIX_MAGIC_FILE) if isfile(path): try: - fh = open(path, 'a+') + fh = open(path, "a+") except OSError as e: log.debug(e) return False @@ -567,29 +656,35 @@ def root_writable(self): @property def 
envs_dirs(self): - return mockable_context_envs_dirs(self.root_writable, self.root_prefix, self._envs_dirs) + return mockable_context_envs_dirs( + self.root_writable, self.root_prefix, self._envs_dirs + ) @property def pkgs_dirs(self): if self._pkgs_dirs: return tuple(IndexedSet(expand(p) for p in self._pkgs_dirs)) else: - cache_dir_name = 'pkgs32' if context.force_32bit else 'pkgs' + cache_dir_name = "pkgs32" if context.force_32bit else "pkgs" fixed_dirs = ( self.root_prefix, - join('~', '.conda'), + join("~", ".conda"), ) if on_win: - fixed_dirs += user_data_dir(APP_NAME, APP_NAME), - return tuple(IndexedSet(expand(join(p, cache_dir_name)) for p in (fixed_dirs))) + fixed_dirs += (user_data_dir(APP_NAME, APP_NAME),) + return tuple( + IndexedSet(expand(join(p, cache_dir_name)) for p in (fixed_dirs)) + ) @memoizedproperty def trash_dir(self): # TODO: this inline import can be cleaned up by moving pkgs_dir write detection logic from ..core.package_cache_data import PackageCacheData + pkgs_dir = PackageCacheData.first_writable().pkgs_dir - trash_dir = join(pkgs_dir, '.trash') + trash_dir = join(pkgs_dir, ".trash") from ..gateways.disk.create import mkdir_p + mkdir_p(trash_dir) return trash_dir @@ -597,8 +692,8 @@ def trash_dir(self): def default_prefix(self): if self.active_prefix: return self.active_prefix - _default_env = os.getenv('CONDA_DEFAULT_ENV') - if _default_env in (None, ROOT_ENV_NAME, 'root'): + _default_env = os.getenv("CONDA_DEFAULT_ENV") + if _default_env in (None, ROOT_ENV_NAME, "root"): return self.root_prefix elif os.sep in _default_env: return abspath(_default_env) @@ -615,11 +710,12 @@ def active_prefix(self): @property def shlvl(self): - return int(os.getenv('CONDA_SHLVL', -1)) + return int(os.getenv("CONDA_SHLVL", -1)) @property def aggressive_update_packages(self): from ..models.match_spec import MatchSpec + return tuple(MatchSpec(s) for s in self._aggressive_update_packages) @property @@ -652,15 +748,15 @@ def conda_exe(self): @property def av_data_dir(self): - """ Directory where critical data for artifact verification (e.g., - various public keys) can be found. """ + """Directory where critical data for artifact verification (e.g., + various public keys) can be found.""" # TODO (AV): Find ways to make this user configurable? - return join(self.conda_prefix, 'etc', 'conda') + return join(self.conda_prefix, "etc", "conda") @property def signing_metadata_url_base(self): - """ Base URL where artifact verification signing metadata (*.root.json, - key_mgr.json) can be obtained. """ + """Base URL where artifact verification signing metadata (*.root.json, + key_mgr.json) can be obtained.""" if self._signing_metadata_url_base: return self._signing_metadata_url_base else: @@ -685,8 +781,8 @@ def conda_exe_vars_dict(self): "CONDA_PYTHON_EXE": sys.executable, } else: - bin_dir = 'Scripts' if on_win else 'bin' - exe = 'conda.exe' if on_win else 'conda' + bin_dir = "Scripts" if on_win else "bin" + exe = "conda.exe" if on_win else "conda" # I was going to use None to indicate a variable to unset, but that gets tricky with # error-on-undefined. 
return { @@ -699,20 +795,27 @@ def conda_exe_vars_dict(self): @memoizedproperty def channel_alias(self): from ..models.channel import Channel + location, scheme, auth, token = split_scheme_auth_token(self._channel_alias) return Channel(scheme=scheme, auth=auth, location=location, token=token) @property def migrated_channel_aliases(self): from ..models.channel import Channel - return tuple(Channel(scheme=scheme, auth=auth, location=location, token=token) - for location, scheme, auth, token in - (split_scheme_auth_token(c) for c in self._migrated_channel_aliases)) + + return tuple( + Channel(scheme=scheme, auth=auth, location=location, token=token) + for location, scheme, auth, token in ( + split_scheme_auth_token(c) for c in self._migrated_channel_aliases + ) + ) @property def prefix_specified(self): - return (self._argparse_args.get("prefix") is not None - or self._argparse_args.get("name") is not None) + return ( + self._argparse_args.get("prefix") is not None + or self._argparse_args.get("name") is not None + ) @memoizedproperty def default_channels(self): @@ -727,18 +830,22 @@ def custom_multichannels(self): default_channels = list(self._default_channels) if self.restore_free_channel: - default_channels.insert(1, 'https://repo.anaconda.com/pkgs/free') + default_channels.insert(1, "https://repo.anaconda.com/pkgs/free") reserved_multichannel_urls = { DEFAULTS_CHANNEL_NAME: default_channels, "local": self.conda_build_local_urls, } reserved_multichannels = { - name: tuple(Channel.make_simple_channel(self.channel_alias, url) for url in urls) + name: tuple( + Channel.make_simple_channel(self.channel_alias, url) for url in urls + ) for name, urls in reserved_multichannel_urls.items() } custom_multichannels = { - name: tuple(Channel.make_simple_channel(self.channel_alias, url) for url in urls) + name: tuple( + Channel.make_simple_channel(self.channel_alias, url) for url in urls + ) for name, urls in self._custom_multichannels.items() } return { @@ -756,7 +863,9 @@ def custom_channels(self): return { channel.name: channel for channel in ( - *chain.from_iterable(channel for channel in self.custom_multichannels.values()), + *chain.from_iterable( + channel for channel in self.custom_multichannels.values() + ), *( Channel.make_simple_channel(self.channel_alias, url, name) for name, url in self._custom_channels.items() @@ -766,17 +875,29 @@ def custom_channels(self): @property def channels(self): - local_add = ('local',) if self.use_local else () - if (self._argparse_args and 'override_channels' in self._argparse_args - and self._argparse_args['override_channels']): + local_add = ("local",) if self.use_local else () + if ( + self._argparse_args + and "override_channels" in self._argparse_args + and self._argparse_args["override_channels"] + ): if not self.override_channels_enabled: from ..exceptions import OperationNotAllowed - raise OperationNotAllowed(dals(""" + + raise OperationNotAllowed( + dals( + """ Overriding channels has been disabled. - """)) - elif not (self._argparse_args and 'channel' in self._argparse_args - and self._argparse_args['channel']): + """ + ) + ) + elif not ( + self._argparse_args + and "channel" in self._argparse_args + and self._argparse_args["channel"] + ): from ..exceptions import ArgumentError + raise ArgumentError( "At least one -c / --channel flag must be supplied when using " "--override-channels." 
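The guard that closes this hunk enforces a simple rule: `--override-channels` is honored only when the feature is enabled and at least one `-c`/`--channel` flag is supplied. A condensed sketch of that control flow, with a plain `ValueError` standing in for conda's `OperationNotAllowed` and `ArgumentError`:

```python
def resolve_channels(args: dict, configured: tuple, override_enabled: bool = True):
    """Simplified stand-in for the channels property's override logic."""
    if args.get("override_channels"):
        if not override_enabled:
            raise ValueError("Overriding channels has been disabled.")
        if not args.get("channel"):
            raise ValueError(
                "At least one -c / --channel flag must be supplied when using "
                "--override-channels."
            )
        # condarc-configured channels are ignored entirely in this branch
        return tuple(args["channel"])
    return configured


print(resolve_channels({"override_channels": True, "channel": ["conda-forge"]}, ("defaults",)))
# -> ('conda-forge',)
```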
@@ -785,38 +906,50 @@ def channels(self):
             return tuple(IndexedSet((*local_add, *self._argparse_args["channel"])))

         # add 'defaults' channel when necessary if --channel is given via the command line
-        if self._argparse_args and 'channel' in self._argparse_args:
+        if self._argparse_args and "channel" in self._argparse_args:
             # TODO: it's args.channel right now, not channels
-            argparse_channels = tuple(self._argparse_args['channel'] or ())
+            argparse_channels = tuple(self._argparse_args["channel"] or ())
             # Add condition to make sure that we add the 'defaults'
             # channel only when no channels are defined in condarc
             # We need to get the config_files and then check that they
             # don't define channels
-            channel_in_config_files = any('channels' in context.raw_data[rc_file].keys()
-                                          for rc_file in self.config_files)
+            channel_in_config_files = any(
+                "channels" in context.raw_data[rc_file].keys()
+                for rc_file in self.config_files
+            )
             if argparse_channels and not channel_in_config_files:
-                return tuple(IndexedSet((*local_add, *argparse_channels, DEFAULTS_CHANNEL_NAME)))
+                return tuple(
+                    IndexedSet((*local_add, *argparse_channels, DEFAULTS_CHANNEL_NAME))
+                )
         return tuple(IndexedSet((*local_add, *self._channels)))

     @property
     def config_files(self):
-        return tuple(path for path in context.collect_all()
-                     if path not in ('envvars', 'cmd_line'))
+        return tuple(
+            path
+            for path in context.collect_all()
+            if path not in ("envvars", "cmd_line")
+        )

     @property
     def use_only_tar_bz2(self):
         # we avoid importing this at the top to avoid PATH issues. Ensure that this
         # is only called when use_only_tar_bz2 is first called.
         import conda_package_handling.api
+
         use_only_tar_bz2 = False
         if self._use_only_tar_bz2 is None:
-            if self._argparse_args and 'use_only_tar_bz2' in self._argparse_args:
-                use_only_tar_bz2 &= self._argparse_args['use_only_tar_bz2']
-        return ((hasattr(conda_package_handling.api, 'libarchive_enabled') and
-                 not conda_package_handling.api.libarchive_enabled) or
-                self._use_only_tar_bz2 or
-                use_only_tar_bz2)
+            if self._argparse_args and "use_only_tar_bz2" in self._argparse_args:
+                use_only_tar_bz2 &= self._argparse_args["use_only_tar_bz2"]
+        return (
+            (
+                hasattr(conda_package_handling.api, "libarchive_enabled")
+                and not conda_package_handling.api.libarchive_enabled
+            )
+            or self._use_only_tar_bz2
+            or use_only_tar_bz2
+        )

     @property
     def binstar_upload(self):
@@ -845,7 +978,7 @@ def user_agent(self):
                 log.debug(
                     "User agent could not be fetched from solver class '%s'.",
                     self.solver,
-                    exc_info=exc
+                    exc_info=exc,
                 )
             builder.append(user_agent_str)
         return " ".join(builder)
@@ -902,16 +1035,17 @@ def os_distribution_name_version(self):
         # 'OSX', '10.13.6'
         # 'Windows', '10.0.17134'
         platform_name = self.platform_system_release[0]
-        if platform_name == 'Linux':
+        if platform_name == "Linux":
             from conda._vendor.distro import id, version
+
             try:
                 distinfo = id(), version(best=True)
             except Exception as e:
-                log.debug('%r', e, exc_info=True)
-                distinfo = ('Linux', 'unknown')
+                log.debug("%r", e, exc_info=True)
+                distinfo = ("Linux", "unknown")
             distribution_name, distribution_version = distinfo[0], distinfo[1]
-        elif platform_name == 'Darwin':
-            distribution_name = 'OSX'
+        elif platform_name == "Darwin":
+            distribution_name = "OSX"
             distribution_version = platform.mac_ver()[0]
         else:
             distribution_name = platform.system()
@@ -929,7 +1063,7 @@ def libc_family_version(self):
     def cpu_flags(self):
         # DANGER: This is rather slow
         info = _get_cpu_info()
-        return info['flags']
+        return info["flags"]

     @memoizedproperty
     @deprecated(
@@ -1623,8 +1757,9 @@ def description_map(self):

 def reset_context(search_path=SEARCH_PATH, argparse_args=None):
     global context
     context.__init__(search_path, argparse_args)
-    context.__dict__.pop('_Context__conda_build', None)
+    context.__dict__.pop("_Context__conda_build", None)
     from ..models.channel import Channel
+
     Channel._reset_state()  # need to import here to avoid circular dependency
     return context
@@ -1644,7 +1779,6 @@ def fresh_context(env=None, search_path=SEARCH_PATH, argparse_args=None, **kwarg


 class ContextStackObject:
-
     def __init__(self, search_path=SEARCH_PATH, argparse_args=None):
         self.set_value(search_path, argparse_args)
@@ -1657,7 +1791,6 @@ def apply(self):


 class ContextStack:
-
     def __init__(self):
         self._stack = [ContextStackObject() for _ in range(3)]
         self._stack_idx = 0
@@ -1673,8 +1806,10 @@ def push(self, search_path, argparse_args):
         self.apply()

     def apply(self):
-        if self._last_search_path != self._stack[self._stack_idx].search_path or \
-                self._last_argparse_args != self._stack[self._stack_idx].argparse_args:
+        if (
+            self._last_search_path != self._stack[self._stack_idx].search_path
+            or self._last_argparse_args != self._stack[self._stack_idx].argparse_args
+        ):
             # Expensive:
             self._stack[self._stack_idx].apply()
             self._last_search_path = self._stack[self._stack_idx].search_path
@@ -1730,6 +1865,7 @@ def replace_context_default(pushing=None, argparse_args=None):

 def _get_cpu_info():
     # DANGER: This is rather slow
     from .._vendor.cpuinfo import get_cpu_info
+
     return frozendict(get_cpu_info())
@@ -1751,7 +1887,7 @@ def locate_prefix_by_name(name, envs_dirs=None):
     error is raised.
     """
     assert name
-    if name in (ROOT_ENV_NAME, 'root'):
+    if name in (ROOT_ENV_NAME, "root"):
         return context.root_prefix
     if envs_dirs is None:
         envs_dirs = context.envs_dirs
@@ -1763,6 +1899,7 @@ def locate_prefix_by_name(name, envs_dirs=None):
             return abspath(prefix)

     from ..exceptions import EnvironmentNameNotFound
+
     raise EnvironmentNameNotFound(name)
@@ -1786,10 +1923,13 @@ def validate_prefix_name(prefix_name: str, ctx: Context, allow_base=True) -> str
         if allow_base:
             return ctx.root_prefix
         else:
-            raise CondaValueError("Use of 'base' as environment name is not allowed here.")
+            raise CondaValueError(
+                "Use of 'base' as environment name is not allowed here."
+            )
     else:
         from ..exceptions import EnvironmentNameNotFound
+
         try:
             return locate_prefix_by_name(prefix_name)
         except EnvironmentNameNotFound:
@@ -1819,10 +1959,12 @@ def determine_target_prefix(ctx, args=None):

     if prefix_name is not None and not prefix_name.strip():  # pragma: no cover
         from ..exceptions import ArgumentError
+
         raise ArgumentError("Argument --name requires a value.")

     if prefix_path is not None and not prefix_path.strip():  # pragma: no cover
         from ..exceptions import ArgumentError
+
         raise ArgumentError("Argument --prefix requires a value.")

     if prefix_name is None and prefix_path is None:
@@ -1837,33 +1979,36 @@ def _first_writable_envs_dir():
     # Calling this function will *create* an envs directory if one does not already
     # exist. Any caller should intend to *use* that directory for *writing*, not just reading.
     for envs_dir in context.envs_dirs:
-        if envs_dir == os.devnull: continue
+        if envs_dir == os.devnull:
+            continue

         # The magic file being used here could change in the future. Don't write programs
         # outside this code base that rely on the presence of this file.
         # This value is duplicated in conda.gateways.disk.create.create_envs_directory().
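The `_first_writable_envs_dir` hunk that starts here (and continues below) selects an environments directory by opening a marker file in append mode — a cheap writability test. A self-contained sketch of just the probe, assuming only that the marker file already exists in directories conda has used before:

```python
import os
from os.path import isfile, join


def first_writable_dir(candidates, marker=".conda_envs_dir_test"):
    """Return the first candidate whose marker file can be opened for append."""
    for directory in candidates:
        if directory == os.devnull:
            continue
        magic_file = join(directory, marker)
        if isfile(magic_file):
            try:
                open(magic_file, "a").close()
                return directory
            except OSError:
                continue  # marker exists but is not writable; try the next one
    return None  # the real function instead falls back to creating an envs dir
```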
- envs_dir_magic_file = join(envs_dir, '.conda_envs_dir_test') + envs_dir_magic_file = join(envs_dir, ".conda_envs_dir_test") if isfile(envs_dir_magic_file): try: - open(envs_dir_magic_file, 'a').close() + open(envs_dir_magic_file, "a").close() return envs_dir except OSError: log.trace("Tried envs_dir but not writable: %s", envs_dir) else: from ..gateways.disk.create import create_envs_directory + was_created = create_envs_directory(envs_dir) if was_created: return envs_dir from ..exceptions import NoWritableEnvsDirError + raise NoWritableEnvsDirError(context.envs_dirs) # backward compatibility for conda-build -@deprecated("23.3", "23.9", addendum="Use `conda.base.context.determine_target_prefix` instead.") +@deprecated( + "23.3", "23.9", addendum="Use `conda.base.context.determine_target_prefix` instead." +) def get_prefix(ctx, args, search=True): # pragma: no cover return determine_target_prefix(ctx or context, args) diff --git a/conda/cli/common.py b/conda/cli/common.py index 0eb78b5ed99..3a889340b69 100644 --- a/conda/cli/common.py +++ b/conda/cli/common.py @@ -1,11 +1,10 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from logging import getLogger -from os.path import basename, dirname, isdir, isfile, join, normcase import re import sys +from logging import getLogger +from os.path import basename, dirname, isdir, isfile, join, normcase -from ..deprecations import deprecated from ..auxlib.ish import dals from ..base.constants import ROOT_ENV_NAME from ..base.context import context, env_name @@ -13,20 +12,22 @@ from ..common.io import swallow_broken_pipe from ..common.path import paths_equal from ..common.serialize import json_dump +from ..deprecations import deprecated +from ..exceptions import DirectoryNotACondaEnvironmentError, EnvironmentLocationNotFound from ..models.match_spec import MatchSpec -from ..exceptions import EnvironmentLocationNotFound, DirectoryNotACondaEnvironmentError def confirm(message="Proceed", choices=("yes", "no"), default="yes", dry_run=NULL): assert default in choices, default if (dry_run is NULL and context.dry_run) or dry_run: from ..exceptions import DryRunExit + raise DryRunExit() options = [] for option in choices: if option == default: - options.append('[%s]' % option[0]) + options.append("[%s]" % option[0]) else: options.append(option[0]) message = "{} ({})? ".format(message, "/".join(options)) @@ -45,19 +46,24 @@ def confirm(message="Proceed", choices=("yes", "no"), default="yes", dry_run=NUL return choices[user_choice] -def confirm_yn(message="Proceed", default='yes', dry_run=NULL): +def confirm_yn(message="Proceed", default="yes", dry_run=NULL): if (dry_run is NULL and context.dry_run) or dry_run: from ..exceptions import DryRunExit + raise DryRunExit() if context.always_yes: return True try: - choice = confirm(message=message, choices=("yes", "no"), default=default, dry_run=dry_run) + choice = confirm( + message=message, choices=("yes", "no"), default=default, dry_run=dry_run + ) except KeyboardInterrupt: # pragma: no cover from ..exceptions import CondaSystemExit + raise CondaSystemExit("\nOperation aborted. 
Exiting.") - if choice == 'no': + if choice == "no": from ..exceptions import CondaSystemExit + raise CondaSystemExit("Exiting.") return True @@ -66,8 +72,12 @@ def confirm_yn(message="Proceed", default='yes', dry_run=NULL): def ensure_name_or_prefix(args, command): if not (args.name or args.prefix): from ..exceptions import CondaValueError - raise CondaValueError('either -n NAME or -p PREFIX option required,\n' - 'try "conda %s -h" for more details' % command) + + raise CondaValueError( + "either -n NAME or -p PREFIX option required,\n" + 'try "conda %s -h" for more details' % command + ) + def is_active_prefix(prefix: str) -> bool: """ @@ -89,7 +99,8 @@ def arg2spec(arg, json=False, update=False): spec = MatchSpec(arg) except: from ..exceptions import CondaValueError - raise CondaValueError('invalid package specification: %s' % arg) + + raise CondaValueError("invalid package specification: %s" % arg) name = spec.name if not spec._is_simple() and update: @@ -125,16 +136,16 @@ def specs_from_args(args, json=False): def strip_comment(line): - return line.split('#')[0].rstrip() + return line.split("#")[0].rstrip() def spec_from_line(line): m = spec_pat.match(strip_comment(line)) if m is None: return None - name, cc, pc = (m.group('name').lower(), m.group('cc'), m.group('pc')) + name, cc, pc = (m.group("name").lower(), m.group("cc"), m.group("pc")) if cc: - return name + cc.replace('=', ' ') + return name + cc.replace("=", " ") elif pc: if pc.startswith("~= "): assert ( @@ -145,7 +156,7 @@ def spec_from_line(line): ver2 = ".".join(ver.split(".")[:-1]) + ".*" return name + " >=" + ver + ",==" + ver2 else: - return name + ' ' + pc.replace(' ', '') + return name + " " + pc.replace(" ", "") else: return name @@ -159,9 +170,9 @@ def specs_from_url(url, json=False): try: for line in open(path): line = line.strip() - if not line or line.startswith('#'): + if not line or line.startswith("#"): continue - if line == '@EXPLICIT': + if line == "@EXPLICIT": explicit = True if explicit: specs.append(line) @@ -169,11 +180,12 @@ def specs_from_url(url, json=False): spec = spec_from_line(line) if spec is None: from ..exceptions import CondaValueError - raise CondaValueError("could not parse '%s' in: %s" % - (line, url)) + + raise CondaValueError(f"could not parse '{line}' in: {url}") specs.append(spec) except OSError as e: from ..exceptions import CondaFileIOError + raise CondaFileIOError(path, e) return specs @@ -184,9 +196,9 @@ def names_in_specs(names, specs): def disp_features(features): if features: - return '[%s]' % ' '.join(features) + return "[%s]" % " ".join(features) else: - return '' + return "" @swallow_broken_pipe @@ -195,33 +207,34 @@ def stdout_json(d): def stdout_json_success(success=True, **kwargs): - result = {'success': success} - actions = kwargs.pop('actions', None) + result = {"success": success} + actions = kwargs.pop("actions", None) if actions: - if 'LINK' in actions: - actions['LINK'] = [prec.dist_fields_dump() for prec in actions['LINK']] - if 'UNLINK' in actions: - actions['UNLINK'] = [prec.dist_fields_dump() for prec in actions['UNLINK']] - result['actions'] = actions + if "LINK" in actions: + actions["LINK"] = [prec.dist_fields_dump() for prec in actions["LINK"]] + if "UNLINK" in actions: + actions["UNLINK"] = [prec.dist_fields_dump() for prec in actions["UNLINK"]] + result["actions"] = actions result.update(kwargs) stdout_json(result) def print_envs_list(known_conda_prefixes, output=True): - if output: print("# conda environments:") print("#") def disp_env(prefix): - fmt = 
'%-20s %s %s' - active = '*' if prefix == context.active_prefix else ' ' + fmt = "%-20s %s %s" + active = "*" if prefix == context.active_prefix else " " if prefix == context.root_prefix: name = ROOT_ENV_NAME - elif any(paths_equal(envs_dir, dirname(prefix)) for envs_dir in context.envs_dirs): + elif any( + paths_equal(envs_dir, dirname(prefix)) for envs_dir in context.envs_dirs + ): name = basename(prefix) else: - name = '' + name = "" if output: print(fmt % (name, active, prefix)) @@ -234,12 +247,19 @@ def disp_env(prefix): def check_non_admin(): from ..common._os import is_admin + if not context.non_admin_enabled and not is_admin(): from ..exceptions import OperationNotAllowed - raise OperationNotAllowed(dals(""" + + raise OperationNotAllowed( + dals( + """ The create, install, update, and remove operations have been disabled on your system for non-privileged users. - """)) + """ + ) + ) + def validate_prefix(prefix): """Verifies the prefix is a valid conda environment. @@ -250,7 +270,7 @@ def validate_prefix(prefix): :rtype: str """ if isdir(prefix): - if not isfile(join(prefix, 'conda-meta', 'history')): + if not isfile(join(prefix, "conda-meta", "history")): raise DirectoryNotACondaEnvironmentError(prefix) else: raise EnvironmentLocationNotFound(prefix) diff --git a/conda/cli/conda_argparse.py b/conda/cli/conda_argparse.py index 4d7354a7180..d0285d7fa24 100644 --- a/conda/cli/conda_argparse.py +++ b/conda/cli/conda_argparse.py @@ -2,41 +2,43 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations +import os +import sys +import warnings +from argparse import REMAINDER, SUPPRESS, Action +from argparse import ArgumentParser as ArgumentParserBase from argparse import ( - ArgumentParser as ArgumentParserBase, - REMAINDER, + Namespace, RawDescriptionHelpFormatter, - SUPPRESS, - Action, - _StoreAction, _CountAction, _HelpAction, - Namespace, + _StoreAction, ) from logging import getLogger -import os from os.path import abspath, expanduser, join from subprocess import Popen -import sys from textwrap import dedent -import warnings from .. import __version__ -from ..deprecations import deprecated -from ..auxlib.ish import dals from ..auxlib.compat import isiterable -from ..base.constants import COMPATIBLE_SHELLS, CONDA_HOMEPAGE_URL, DepsModifier, \ - UpdateModifier +from ..auxlib.ish import dals +from ..base.constants import ( + COMPATIBLE_SHELLS, + CONDA_HOMEPAGE_URL, + DepsModifier, + UpdateModifier, +) from ..base.context import context from ..common.constants import NULL +from ..deprecations import deprecated log = getLogger(__name__) # duplicated code in the interest of import efficiency on_win = bool(sys.platform == "win32") -user_rc_path = abspath(expanduser('~/.condarc')) +user_rc_path = abspath(expanduser("~/.condarc")) escaped_user_rc_path = user_rc_path.replace("%", "%%") -escaped_sys_rc_path = abspath(join(sys.prefix, '.condarc')).replace("%", "%%") +escaped_sys_rc_path = abspath(join(sys.prefix, ".condarc")).replace("%", "%%") #: List of a built-in commands; these cannot be overriden by plugin subcommands BUILTIN_COMMANDS = { @@ -61,14 +63,15 @@ def generate_parser(): p = ArgumentParser( - description='conda is a tool for managing and deploying applications,' - ' environments and packages.', + description="conda is a tool for managing and deploying applications," + " environments and packages.", ) p.add_argument( - '-V', '--version', - action='version', - version='conda %s' % __version__, - help="Show the conda version number and exit." 
+ "-V", + "--version", + action="version", + version="conda %s" % __version__, + help="Show the conda version number and exit.", ) p.add_argument( "--debug", @@ -81,8 +84,8 @@ def generate_parser(): help=SUPPRESS, ) sub_parsers = p.add_subparsers( - metavar='command', - dest='cmd', + metavar="command", + dest="cmd", required=True, ) @@ -114,10 +117,11 @@ def do_call(args, parser): if getattr(args, "plugin_subcommand", None): return args.plugin_subcommand.action(sys.argv[2:]) - relative_mod, func_name = args.func.rsplit('.', 1) + relative_mod, func_name = args.func.rsplit(".", 1) # func_name should always be 'execute' from importlib import import_module - module = import_module(relative_mod, __name__.rsplit('.', 1)[0]) + + module = import_module(relative_mod, __name__.rsplit(".", 1)[0]) return getattr(module, func_name)(args, parser) @@ -130,11 +134,11 @@ def find_builtin_commands(parser): class ArgumentParser(ArgumentParserBase): def __init__(self, *args, **kwargs): - if not kwargs.get('formatter_class'): - kwargs['formatter_class'] = RawDescriptionHelpFormatter - if 'add_help' not in kwargs: + if not kwargs.get("formatter_class"): + kwargs["formatter_class"] = RawDescriptionHelpFormatter + if "add_help" not in kwargs: add_custom_help = True - kwargs['add_help'] = False + kwargs["add_help"] = False else: add_custom_help = False super().__init__(*args, **kwargs) @@ -148,8 +152,8 @@ def __init__(self, *args, **kwargs): self._subcommands = context.plugin_manager.get_hook_results("subcommands") if self._subcommands: - self.epilog = 'conda commands available from other packages:' + ''.join( - f'\n {subcommand.name} - {subcommand.summary}' + self.epilog = "conda commands available from other packages:" + "".join( + f"\n {subcommand.name} - {subcommand.summary}" for subcommand in self._subcommands ) @@ -162,7 +166,7 @@ def _get_action_from_name(self, name): if name is None: return None for action in container: - if '/'.join(action.option_strings) == name: + if "/".join(action.option_strings) == name: return action elif action.metavar == name: return action @@ -171,12 +175,14 @@ def _get_action_from_name(self, name): def error(self, message): import re + from .find_commands import find_executable + exc = sys.exc_info()[1] if exc: # this is incredibly lame, but argparse stupidly does not expose # reasonable hooks for customizing error handling - if hasattr(exc, 'argument_name'): + if hasattr(exc, "argument_name"): argument = self._get_action_from_name(exc.argument_name) else: argument = None @@ -195,11 +201,12 @@ def error(self, message): topic="Loading conda subcommands via executables", addendum="Use the plugin system instead.", ) - executable = find_executable('conda-' + cmd) + executable = find_executable("conda-" + cmd) if not executable: from ..exceptions import CommandNotFoundError + raise CommandNotFoundError(cmd) - args = [find_executable('conda-' + cmd)] + args = [find_executable("conda-" + cmd)] args.extend(sys.argv[2:]) _exec(args, os.environ) @@ -208,14 +215,15 @@ def error(self, message): def print_help(self): super().print_help() - if sys.argv[1:] in ([], [''], ['help'], ['-h'], ['--help']): + if sys.argv[1:] in ([], [""], ["help"], ["-h"], ["--help"]): from .find_commands import find_commands + other_commands = find_commands() if other_commands: - builder = [''] + builder = [""] builder.append("conda commands available from other packages (legacy):") - builder.extend(' %s' % cmd for cmd in sorted(other_commands)) - print('\n'.join(builder)) + builder.extend(" %s" % cmd for cmd in 
sorted(other_commands)) + print("\n".join(builder)) def _check_value(self, action, value): # extend to properly handle when we accept multiple choices and the default is a list @@ -273,7 +281,6 @@ def _exec_unix(executable_args, env_vars): class NullCountAction(_CountAction): - @staticmethod def _ensure_value(namespace, name, value): if getattr(namespace, name, NULL) in (NULL, None): @@ -330,7 +337,9 @@ def __call__(self, parser, namespace, values, option_string=None): class DeprecatedAction(_StoreAction): def __call__(self, parser, namespace, values, option_string=None): - warnings.warn(f"Option {self.option_strings} is deprecated!", DeprecationWarning) + warnings.warn( + f"Option {self.option_strings} is deprecated!", DeprecationWarning + ) super().__call__(parser, namespace, values, option_string) @@ -340,17 +349,22 @@ def __call__(self, parser, namespace, values, option_string=None): # # ############################################################################################# + def configure_parser_clean(sub_parsers): - descr = dedent(""" + descr = dedent( + """ Remove unused packages and caches. - """) - example = dedent(""" + """ + ) + example = dedent( + """ Examples:: conda clean --tarballs - """) + """ + ) p = sub_parsers.add_parser( - 'clean', + "clean", description=descr, help=descr, epilog=example, @@ -358,42 +372,49 @@ def configure_parser_clean(sub_parsers): removal_target_options = p.add_argument_group("Removal Targets") removal_target_options.add_argument( - "-a", "--all", + "-a", + "--all", action="store_true", help="Remove index cache, lock files, unused cache packages, tarballs, and logfiles.", ) removal_target_options.add_argument( - "-i", "--index-cache", + "-i", + "--index-cache", action="store_true", help="Remove index cache.", ) removal_target_options.add_argument( - '-p', '--packages', - action='store_true', + "-p", + "--packages", + action="store_true", help="Remove unused packages from writable package caches. " - "WARNING: This does not check for packages installed using " - "symlinks back to the package cache.", + "WARNING: This does not check for packages installed using " + "symlinks back to the package cache.", ) removal_target_options.add_argument( - "-t", "--tarballs", + "-t", + "--tarballs", action="store_true", help="Remove cached package tarballs.", ) removal_target_options.add_argument( - '-f', '--force-pkgs-dirs', - action='store_true', + "-f", + "--force-pkgs-dirs", + action="store_true", help="Remove *all* writable package caches. This option is not included with the --all " - "flag. WARNING: This will break environments with packages installed using symlinks " - "back to the package cache.", + "flag. WARNING: This will break environments with packages installed using symlinks " + "back to the package cache.", ) removal_target_options.add_argument( "-c", # for tempfile extension (.c~) "--tempfiles", const=sys.prefix, action=ExtendConstAction, - help=("Remove temporary files that could not be deleted earlier due to being in-use. " - "The argument for the --tempfiles flag is a path (or list of paths) to the " - "environment(s) where the tempfiles should be found and removed."), + help=( + "Remove temporary files that could not be deleted earlier due to being in-use. " + "The argument for the --tempfiles flag is a path (or list of paths) to the " + "environment(s) where the tempfiles should be found and removed." 
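`--tempfiles` above is registered with `const=sys.prefix` and a custom `ExtendConstAction` defined elsewhere in this module. The following reimplementation of the apparent behavior — extend the destination list with the given paths, or with the constant when the flag is passed bare — is an assumption for illustration, not conda's actual class:

```python
import argparse
import sys


class ExtendConstAction(argparse.Action):
    """Assumed behavior: accumulate values, falling back to const when bare."""

    def __init__(self, option_strings, dest, const, nargs="*", **kwargs):
        super().__init__(option_strings, dest, nargs=nargs, const=const, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        current = getattr(namespace, self.dest, None) or []
        current.extend(values or [self.const])  # bare flag contributes the const
        setattr(namespace, self.dest, current)


parser = argparse.ArgumentParser()
parser.add_argument("-c", "--tempfiles", const=sys.prefix, action=ExtendConstAction)
print(parser.parse_args(["-c", "-c", "/tmp/env1", "/tmp/env2"]).tempfiles)
# -> [<sys.prefix>, '/tmp/env1', '/tmp/env2']
```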
+ ), ) removal_target_options.add_argument( "-l", @@ -404,80 +425,89 @@ def configure_parser_clean(sub_parsers): add_output_and_prompt_options(p) - p.set_defaults(func='.main_clean.execute') + p.set_defaults(func=".main_clean.execute") def configure_parser_info(sub_parsers): help = "Display information about current conda install." p = sub_parsers.add_parser( - 'info', + "info", description=help, help=help, ) add_parser_json(p) p.add_argument( "--offline", - action='store_true', + action="store_true", default=NULL, help=SUPPRESS, ) p.add_argument( - '-a', "--all", + "-a", + "--all", action="store_true", help="Show all information.", ) p.add_argument( - '--base', - action='store_true', - help='Display base environment path.', + "--base", + action="store_true", + help="Display base environment path.", ) # TODO: deprecate 'conda info --envs' and create 'conda list --envs' p.add_argument( - '-e', "--envs", + "-e", + "--envs", action="store_true", help="List all known conda environments.", ) p.add_argument( - '-l', "--license", + "-l", + "--license", action="store_true", help=SUPPRESS, ) p.add_argument( - '-s', "--system", + "-s", + "--system", action="store_true", help="List environment variables.", ) p.add_argument( - '--root', - action='store_true', + "--root", + action="store_true", help=SUPPRESS, - dest='base', + dest="base", ) p.add_argument( - '--unsafe-channels', - action='store_true', - help='Display list of channels with tokens exposed.', + "--unsafe-channels", + action="store_true", + help="Display list of channels with tokens exposed.", ) p.add_argument( - 'packages', + "packages", action="store", - nargs='*', + nargs="*", help=SUPPRESS, ) - p.set_defaults(func='.main_info.execute') + p.set_defaults(func=".main_info.execute") def configure_parser_config(sub_parsers): - descr = dedent(""" + descr = ( + dedent( + """ Modify configuration values in .condarc. This is modeled after the git config command. Writes to the user .condarc file (%s) by default. Use the --show-sources flag to display all identified configuration locations on your computer. - """) % escaped_user_rc_path + """ + ) + % escaped_user_rc_path + ) # Note, the extra whitespace in the list keys is on purpose. It's so the # formatting from help2man is still valid YAML (otherwise it line wraps the @@ -529,7 +559,7 @@ def configure_parser_config(sub_parsers): ) p = sub_parsers.add_parser( - 'config', + "config", description=descr, help=descr, epilog=additional_descr, @@ -538,8 +568,9 @@ def configure_parser_config(sub_parsers): # TODO: use argparse.FileType config_file_location_group = p.add_argument_group( - 'Config File Location Selection', - "Without one of these flags, the user config file at '%s' is used." % escaped_user_rc_path + "Config File Location Selection", + "Without one of these flags, the user config file at '%s' is used." + % escaped_user_rc_path, ) location = config_file_location_group.add_mutually_exclusive_group() location.add_argument( @@ -551,17 +582,14 @@ def configure_parser_config(sub_parsers): "--env", action="store_true", help="Write to the active conda environment .condarc file (%s). " - "If no environment is active, write to the user config file (%s)." - "" % ( - os.getenv('CONDA_PREFIX', "").replace("%", "%%"), - escaped_user_rc_path, - ), - ) - location.add_argument( - "--file", - action="store", - help="Write to the given file." + "If no environment is active, write to the user config file (%s)." 
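The `escaped_user_rc_path` interpolation above (and the `.replace("%", "%%")` just below) exists because argparse passes help text through %-formatting (for `%(default)s` and similar), so any literal percent sign must be doubled. A small demonstration:

```python
import argparse

env_path = r"%USERPROFILE%\.condarc"   # a path containing literal "%"
escaped = env_path.replace("%", "%%")  # same escaping as the diff

parser = argparse.ArgumentParser(prog="demo")
parser.add_argument(
    "--system",
    action="store_true",
    help="Write to the config file (%s)." % escaped,
)
# format_help() re-expands "%%" to "%", printing: ... (%USERPROFILE%\.condarc).
print(parser.format_help())
```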
+ "" + % ( + os.getenv("CONDA_PREFIX", "").replace("%", "%%"), + escaped_user_rc_path, + ), ) + location.add_argument("--file", action="store", help="Write to the given file.") # XXX: Does this really have to be mutually exclusive. I think the below # code will work even if it is a regular group (although combination of @@ -570,10 +598,10 @@ def configure_parser_config(sub_parsers): config_subcommands = _config_subcommands.add_mutually_exclusive_group() config_subcommands.add_argument( "--show", - nargs='*', + nargs="*", default=None, help="Display configuration values as calculated and compiled. " - "If no arguments given, show information for all configuration values.", + "If no arguments given, show information for all configuration values.", ) config_subcommands.add_argument( "--show-sources", @@ -584,31 +612,31 @@ def configure_parser_config(sub_parsers): "--validate", action="store_true", help="Validate all configuration sources. Iterates over all .condarc files " - "and checks for parsing errors.", + "and checks for parsing errors.", ) config_subcommands.add_argument( "--describe", - nargs='*', + nargs="*", default=None, help="Describe given configuration parameters. If no arguments given, show " - "information for all configuration parameters.", + "information for all configuration parameters.", ) config_subcommands.add_argument( "--write-default", action="store_true", help="Write the default configuration to a file. " - "Equivalent to `conda config --describe > ~/.condarc`.", + "Equivalent to `conda config --describe > ~/.condarc`.", ) _config_modifiers = p.add_argument_group("Config Modifiers") config_modifiers = _config_modifiers.add_mutually_exclusive_group() config_modifiers.add_argument( "--get", - nargs='*', + nargs="*", action="store", help="Get a configuration value.", default=None, - metavar='KEY', + metavar="KEY", ) config_modifiers.add_argument( "--append", @@ -616,15 +644,16 @@ def configure_parser_config(sub_parsers): action="append", help="""Add one configuration value to the end of a list key.""", default=[], - metavar=('KEY', 'VALUE'), + metavar=("KEY", "VALUE"), ) config_modifiers.add_argument( - "--prepend", "--add", + "--prepend", + "--add", nargs=2, action="append", help="""Add one configuration value to the beginning of a list key.""", default=[], - metavar=('KEY', 'VALUE'), + metavar=("KEY", "VALUE"), ) config_modifiers.add_argument( "--set", @@ -632,7 +661,7 @@ def configure_parser_config(sub_parsers): action="append", help="""Set a boolean or string key.""", default=[], - metavar=('KEY', 'VALUE'), + metavar=("KEY", "VALUE"), ) config_modifiers.add_argument( "--remove", @@ -641,7 +670,7 @@ def configure_parser_config(sub_parsers): help="""Remove a configuration value from a list key. This removes all instances of the value.""", default=[], - metavar=('KEY', 'VALUE'), + metavar=("KEY", "VALUE"), ) config_modifiers.add_argument( "--remove-key", @@ -658,22 +687,26 @@ def configure_parser_config(sub_parsers): ) p.add_argument( - "-f", "--force", + "-f", + "--force", action="store_true", default=NULL, help=SUPPRESS, # TODO: No longer used. Remove in a future release. ) - p.set_defaults(func='.main_config.execute') + p.set_defaults(func=".main_config.execute") def configure_parser_create(sub_parsers): help = "Create a new conda environment from a list of specified packages. " - descr = (help + "To use the newly-created environment, use 'conda activate " - "envname'. 
This command requires either the -n NAME or -p PREFIX" - "option.") + descr = ( + help + "To use the newly-created environment, use 'conda activate " + "envname'. This command requires either the -n NAME or -p PREFIX" + "option." + ) - example = dedent(""" + example = dedent( + """ Examples: Create an environment containing the package 'sqlite':: @@ -684,9 +717,10 @@ def configure_parser_create(sub_parsers): conda create -n env2 --clone path/to/file/env1 - """) + """ + ) p = sub_parsers.add_parser( - 'create', + "create", description=descr, help=help, epilog=example, @@ -703,7 +737,8 @@ def configure_parser_create(sub_parsers): add_parser_default_packages(solver_mode_options) add_parser_solver(solver_mode_options) p.add_argument( - '-m', "--mkdir", + "-m", + "--mkdir", action="store_true", help=SUPPRESS, ) @@ -711,12 +746,12 @@ def configure_parser_create(sub_parsers): "--dev", action=NullCountAction, help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", dest="dev", default=NULL, ) - p.set_defaults(func='.main_create.execute') + p.set_defaults(func=".main_create.execute") def configure_parser_init(sub_parsers): @@ -764,7 +799,7 @@ def configure_parser_init(sub_parsers): # """) p = sub_parsers.add_parser( - 'init', + "init", description=descr, help=help, epilog=epilog, @@ -784,7 +819,7 @@ def configure_parser_init(sub_parsers): default=NULL, ) - setup_type_group = p.add_argument_group('setup type') + setup_type_group = p.add_argument_group("setup type") setup_type_group.add_argument( "--install", action="store_true", @@ -818,8 +853,8 @@ def configure_parser_init(sub_parsers): ) p.add_argument( - 'shells', - nargs='*', + "shells", + nargs="*", choices=COMPATIBLE_SHELLS, metavar="SHELLS", help=( @@ -840,16 +875,19 @@ def configure_parser_init(sub_parsers): add_parser_json(p) p.add_argument( - "-d", "--dry-run", + "-d", + "--dry-run", action="store_true", help="Only display what would have been done.", ) - p.set_defaults(func='.main_init.execute') + p.set_defaults(func=".main_init.execute") def configure_parser_install(sub_parsers): help = "Installs a list of packages into a specified conda environment." - descr = dedent(help + """ + descr = dedent( + help + + """ This command accepts a list of package specifications (e.g, bitarray=0.8) and installs a set of packages consistent with those specifications and @@ -871,8 +909,10 @@ def configure_parser_install(sub_parsers): (e.g. ./lxml-3.2.0-py27_0.tar.bz2). Using conda in this mode implies the --no-deps option, and should likewise be used with great caution. Explicit filenames and package specifications cannot be mixed in a single command. 
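The install description above draws a hard line between MatchSpec strings (`bitarray=0.8`) and explicit package files, which imply `--no-deps` and cannot be mixed with specs. A rough sketch of that classification; the extension-based test is a simplification of conda's real path/URL handling:

```python
CONDA_PACKAGE_EXTENSIONS = (".tar.bz2", ".conda")  # the formats named above


def classify_install_args(packages):
    """Reject mixed invocations, per the help text above (simplified check)."""
    explicit = [pkg for pkg in packages if pkg.endswith(CONDA_PACKAGE_EXTENSIONS)]
    if explicit and len(explicit) != len(packages):
        raise ValueError("cannot mix package specifications with explicit filenames")
    return "explicit" if explicit else "specs"


print(classify_install_args(["scipy", "bitarray=0.8"]))        # -> specs
print(classify_install_args(["./lxml-3.2.0-py27_0.tar.bz2"]))  # -> explicit
```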
- """) - example = dedent(""" + """ + ) + example = dedent( + """ Examples: Install the package 'scipy' into the currently-active environment:: @@ -887,9 +927,10 @@ def configure_parser_install(sub_parsers): conda install -p path/to/myenv python=3.10 - """) + """ + ) p = sub_parsers.add_parser( - 'install', + "install", description=descr, help=help, epilog=example, @@ -898,7 +939,7 @@ def configure_parser_install(sub_parsers): "--revision", action="store", help="Revert to the specified REVISION.", - metavar='REVISION', + metavar="REVISION", ) solver_mode_options, package_install_options = add_parser_create_install_update(p) @@ -910,11 +951,12 @@ def configure_parser_install(sub_parsers): action="store_true", default=NULL, help="Ensure that any user-requested package for the current operation is uninstalled and " - "reinstalled, even if that package already exists in the environment.", + "reinstalled, even if that package already exists in the environment.", ) add_parser_update_modifiers(solver_mode_options) package_install_options.add_argument( - '-m', "--mkdir", + "-m", + "--mkdir", action="store_true", help="Create the environment directory, if necessary.", ) @@ -923,25 +965,26 @@ def configure_parser_install(sub_parsers): action="store_true", default=NULL, help="Allow clobbering (i.e. overwriting) of overlapping file paths " - "within packages and suppress related warnings.", + "within packages and suppress related warnings.", ) p.add_argument( "--dev", action=NullCountAction, help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", dest="dev", default=NULL, ) - p.set_defaults(func='.main_install.execute') + p.set_defaults(func=".main_install.execute") def configure_parser_list(sub_parsers): descr = "List installed packages in a conda environment." # Note, the formatting of this is designed to work well with help2man - examples = dedent(""" + examples = dedent( + """ Examples: List all packages in the current environment:: @@ -964,9 +1007,10 @@ def configure_parser_list(sub_parsers): conda create -n myenv --file package-list.txt - """) + """ + ) p = sub_parsers.add_parser( - 'list', + "list", description=descr, help=descr, formatter_class=RawDescriptionHelpFormatter, @@ -978,21 +1022,23 @@ def configure_parser_list(sub_parsers): add_parser_json(p) add_parser_show_channel_urls(p) p.add_argument( - '-c', "--canonical", + "-c", + "--canonical", action="store_true", help="Output canonical names of packages only.", ) p.add_argument( - '-f', "--full-name", + "-f", + "--full-name", action="store_true", help="Only search for full names, i.e., ^$. " - "--full-name NAME is identical to regex '^NAME$'.", + "--full-name NAME is identical to regex '^NAME$'.", ) p.add_argument( "--explicit", action="store_true", help="List explicitly all installed conda packages with URL " - "(output may be used by conda create --file).", + "(output may be used by conda create --file).", ) p.add_argument( "--md5", @@ -1000,14 +1046,16 @@ def configure_parser_list(sub_parsers): help="Add MD5 hashsum when using --explicit.", ) p.add_argument( - '-e', "--export", + "-e", + "--export", action="store_true", help="Output explicit, machine-readable requirement strings instead of " - "human-readable lists of packages. 
This output may be used by " - "conda create --file.", + "human-readable lists of packages. This output may be used by " + "conda create --file.", ) p.add_argument( - '-r', "--revisions", + "-r", + "--revisions", action="store_true", help="List the revision history.", ) @@ -1016,14 +1064,15 @@ def configure_parser_list(sub_parsers): action="store_false", default=True, dest="pip", - help="Do not include pip-only installed packages.") + help="Do not include pip-only installed packages.", + ) p.add_argument( - 'regex', + "regex", action="store", nargs="?", help="List only packages matching this regular expression.", ) - p.set_defaults(func='.main_list.execute') + p.set_defaults(func=".main_list.execute") def configure_parser_compare(sub_parsers): @@ -1047,7 +1096,7 @@ def configure_parser_compare(sub_parsers): """ ) p = sub_parsers.add_parser( - 'compare', + "compare", description=descr, help=descr, formatter_class=RawDescriptionHelpFormatter, @@ -1058,35 +1107,38 @@ def configure_parser_compare(sub_parsers): add_parser_json(p) add_parser_prefix(p) p.add_argument( - 'file', + "file", action="store", help="Path to the environment file that is to be compared against.", ) - p.set_defaults(func='.main_compare.execute') + p.set_defaults(func=".main_compare.execute") def configure_parser_package(sub_parsers): descr = "Low-level conda package utility. (EXPERIMENTAL)" p = sub_parsers.add_parser( - 'package', + "package", description=descr, help=descr, ) add_parser_prefix(p) p.add_argument( - '-w', "--which", + "-w", + "--which", metavar="PATH", - nargs='+', + nargs="+", action="store", help="Given some file's PATH, print which conda package the file came from.", ) p.add_argument( - '-r', "--reset", + "-r", + "--reset", action="store_true", help="Remove all untracked files and exit.", ) p.add_argument( - '-u', "--untracked", + "-u", + "--untracked", action="store_true", help="Display all untracked files and exit.", ) @@ -1108,7 +1160,7 @@ def configure_parser_package(sub_parsers): default=0, help="Designate package build number of the package being created.", ) - p.set_defaults(func='.main_package.execute') + p.set_defaults(func=".main_package.execute") def configure_parser_remove(sub_parsers, aliases): @@ -1172,21 +1224,22 @@ def configure_parser_remove(sub_parsers, aliases): help="Remove features (instead of packages).", ) solver_mode_options.add_argument( - "--force-remove", "--force", + "--force-remove", + "--force", action="store_true", help="Forces removal of a package without removing packages that depend on it. " - "Using this option will usually leave your environment in a broken and " - "inconsistent state.", - dest='force_remove', + "Using this option will usually leave your environment in a broken and " + "inconsistent state.", + dest="force_remove", ) solver_mode_options.add_argument( "--no-pin", action="store_true", - dest='ignore_pinned', + dest="ignore_pinned", default=NULL, help="Ignore pinned package(s) that apply to the current operation. 
" - "These pinned packages might come from a .condarc file or a file in " - "/conda-meta/pinned.", + "These pinned packages might come from a .condarc file or a file in " + "/conda-meta/pinned.", ) add_parser_prune(solver_mode_options) add_parser_solver(solver_mode_options) @@ -1195,38 +1248,40 @@ def configure_parser_remove(sub_parsers, aliases): add_output_and_prompt_options(p) p.add_argument( - 'package_names', - metavar='package_name', + "package_names", + metavar="package_name", action="store", - nargs='*', + nargs="*", help="Package names to remove from the environment.", ) p.add_argument( "--dev", action=NullCountAction, help="Use `sys.executable -m conda` in wrapper scripts instead of CONDA_EXE. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", dest="dev", default=NULL, ) - p.set_defaults(func='.main_remove.execute') + p.set_defaults(func=".main_remove.execute") def configure_parser_run(sub_parsers): help = "Run an executable in a conda environment." descr = help - example = dedent(""" + example = dedent( + """ Example usage:: $ conda create -y -n my-python-env python=3 $ conda run -n my-python-env python --version - """) + """ + ) p = sub_parsers.add_parser( - 'run', + "run", description=descr, help=help, epilog=example, @@ -1234,7 +1289,8 @@ def configure_parser_run(sub_parsers): add_parser_prefix(p) p.add_argument( - "-v", "--verbose", + "-v", + "--verbose", action=NullCountAction, help="Use once for info, twice for debug, three times for trace.", dest="verbosity", @@ -1245,9 +1301,9 @@ def configure_parser_run(sub_parsers): "--dev", action=NullCountAction, help="Sets `CONDA_EXE` to `python -m conda`, assuming the current " - "working directory contains the root of conda development sources. " - "This is mainly for use during tests where we test new conda sources " - "against old Python versions.", + "working directory contains the root of conda development sources. " + "This is mainly for use during tests where we test new conda sources " + "against old Python versions.", dest="dev", default=NULL, ) @@ -1256,16 +1312,16 @@ def configure_parser_run(sub_parsers): "--debug-wrapper-scripts", action=NullCountAction, help="When this is set, where implemented, the shell wrapper scripts" - "will use the echo command to print debugging information to " - "stderr (standard error).", + "will use the echo command to print debugging information to " + "stderr (standard error).", dest="debug_wrapper_scripts", default=NULL, ) p.add_argument( "--cwd", help="Current working directory for command to run in. Defaults to " - "the user's current working directory if no directory is specified.", - default=os.getcwd() + "the user's current working directory if no directory is specified.", + default=os.getcwd(), ) p.add_argument( "--no-capture-output", @@ -1276,22 +1332,26 @@ def configure_parser_run(sub_parsers): ) p.add_argument( - 'executable_call', + "executable_call", nargs=REMAINDER, help="Executable name, with additional arguments to be passed to the executable " - "on invocation.", + "on invocation.", ) - p.set_defaults(func='.main_run.execute') + p.set_defaults(func=".main_run.execute") def configure_parser_search(sub_parsers): help = "Search for packages and display associated information." - descr = (help + """The input is a MatchSpec, a query language for conda packages. 
+ descr = ( + help + + """The input is a MatchSpec, a query language for conda packages. See examples below. - """) + """ + ) - example = dedent(""" + example = dedent( + """ Examples: Search for a specific package named 'scikit-learn':: @@ -1321,9 +1381,10 @@ def configure_parser_search(sub_parsers): conda search conda-forge::numpy conda search 'numpy[channel=conda-forge, subdir=osx-64]' - """) + """ + ) p = sub_parsers.add_parser( - 'search', + "search", description=descr, help=descr, epilog=example, @@ -1332,25 +1393,27 @@ def configure_parser_search(sub_parsers): "--envs", action="store_true", help="Search all of the current user's environments. If run as Administrator " - "(on Windows) or UID 0 (on unix), search all known environments on the system.", + "(on Windows) or UID 0 (on unix), search all known environments on the system.", ) p.add_argument( - '-i', "--info", + "-i", + "--info", action="store_true", - help="Provide detailed information about each package." + help="Provide detailed information about each package.", ) p.add_argument( - '--subdir', '--platform', - action='store', - dest='subdir', + "--subdir", + "--platform", + action="store", + dest="subdir", help="Search the given subdir. Should be formatted like 'osx-64', 'linux-32', " - "'win-64', and so on. The default is to search the current platform.", + "'win-64', and so on. The default is to search the current platform.", default=NULL, ) p.add_argument( - 'match_spec', - default='*', - nargs='?', + "match_spec", + default="*", + nargs="?", help=SUPPRESS, ) @@ -1360,7 +1423,8 @@ def configure_parser_search(sub_parsers): help=SUPPRESS, ) p.add_argument( - '-f', "--full-name", + "-f", + "--full-name", action="store_true", help=SUPPRESS, ) @@ -1371,7 +1435,8 @@ def configure_parser_search(sub_parsers): ) add_parser_known(p) p.add_argument( - '-o', "--outdated", + "-o", + "--outdated", action="store_true", help=SUPPRESS, ) @@ -1391,7 +1456,7 @@ def configure_parser_search(sub_parsers): add_parser_channels(p) add_parser_networking(p) add_parser_json(p) - p.set_defaults(func='.main_search.execute') + p.set_defaults(func=".main_search.execute") def configure_parser_update(sub_parsers, aliases): @@ -1436,7 +1501,7 @@ def configure_parser_update(sub_parsers, aliases): action="store_true", default=NULL, help="Ensure that any user-requested package for the current operation is uninstalled and " - "reinstalled, even if that package already exists in the environment.", + "reinstalled, even if that package already exists in the environment.", ) add_parser_update_modifiers(solver_mode_options) @@ -1445,9 +1510,9 @@ def configure_parser_update(sub_parsers, aliases): action="store_true", default=NULL, help="Allow clobbering of overlapping file paths within packages, " - "and suppress related warnings.", + "and suppress related warnings.", ) - p.set_defaults(func='.main_update.execute') + p.set_defaults(func=".main_update.execute") NOTICES_HELP = "Retrieves latest channel notifications." @@ -1483,6 +1548,7 @@ def configure_parser_notices(sub_parsers, name="notices"): add_parser_channels(p) p.set_defaults(func=".main_notices.execute") + def configure_parser_rename(sub_parsers) -> None: help = "Renames an existing environment." 
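Every `configure_parser_*` function in this file ends with `p.set_defaults(func="...execute")`, storing a dotted path instead of a callable so that each subcommand module is imported only when actually invoked (see `do_call` earlier in this diff). A self-contained sketch of that lazy dispatch:

```python
from importlib import import_module


def dispatch(func_path, package, *args, **kwargs):
    """Import func_path's module relative to `package`, then call the function."""
    relative_mod, func_name = func_path.rsplit(".", 1)
    module = import_module(relative_mod, package)  # e.g. conda.cli.main_rename
    return getattr(module, func_name)(*args, **kwargs)


# Hypothetical usage with a conda checkout on sys.path:
#   dispatch(".main_rename.execute", "conda.cli", args, parser)
```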
descr = dals( @@ -1532,7 +1598,7 @@ def configure_parser_rename(sub_parsers) -> None: "-d", "--dry-run", help="Only display what would have been done by the current command, arguments, " - "and other flags.", + "and other flags.", action="store_true", default=False, ) @@ -1545,6 +1611,7 @@ def configure_parser_rename(sub_parsers) -> None: # # ############################################################################################# + def add_parser_create_install_update(p, prefix_required=False): add_parser_prefix(p, prefix_required) add_parser_channels(p) @@ -1558,7 +1625,7 @@ def add_parser_create_install_update(p, prefix_required=False): action="store_true", default=NULL, help="Solve an environment and ensure package caches are populated, but exit " - "prior to unlinking and linking packages into the prefix.", + "prior to unlinking and linking packages into the prefix.", ) add_parser_show_channel_urls(output_and_prompt_options) @@ -1570,15 +1637,15 @@ def add_parser_create_install_update(p, prefix_required=False): p.add_argument( "--file", default=[], - action='append', + action="append", help="Read package versions from the given file. Repeated file " - "specifications can be passed (e.g. --file=file1 --file=file2).", + "specifications can be passed (e.g. --file=file1 --file=file2).", ) p.add_argument( - 'packages', - metavar='package_spec', + "packages", + metavar="package_spec", action="store", - nargs='*', + nargs="*", help="List of packages to install or update in the conda environment.", ) @@ -1586,11 +1653,7 @@ def add_parser_create_install_update(p, prefix_required=False): def add_parser_pscheck(p): - p.add_argument( - "--force-pscheck", - action="store_true", - help=SUPPRESS - ) + p.add_argument("--force-pscheck", action="store_true", help=SUPPRESS) def add_parser_show_channel_urls(p): @@ -1600,7 +1663,7 @@ def add_parser_show_channel_urls(p): dest="show_channel_urls", default=NULL, help="Show channel urls. " - "Overrides the value given by `conda config --show show_channel_urls`.", + "Overrides the value given by `conda config --show show_channel_urls`.", ) p.add_argument( "--no-show-channel-urls", @@ -1618,7 +1681,8 @@ def add_parser_help(p): """ p.add_argument( - '-h', '--help', + "-h", + "--help", action=_HelpAction, help="Show this help message and exit.", ) @@ -1626,23 +1690,29 @@ def add_parser_help(p): def add_parser_prefix(p, prefix_required=False): target_environment_group = p.add_argument_group("Target Environment Specification") - npgroup = target_environment_group.add_mutually_exclusive_group(required=prefix_required) + npgroup = target_environment_group.add_mutually_exclusive_group( + required=prefix_required + ) npgroup.add_argument( - '-n', "--name", + "-n", + "--name", action="store", help="Name of environment.", metavar="ENVIRONMENT", ) npgroup.add_argument( - '-p', "--prefix", + "-p", + "--prefix", action="store", help="Full path to environment location (i.e. prefix).", - metavar='PATH', + metavar="PATH", ) def add_parser_json(p): - output_and_prompt_options = p.add_argument_group("Output, Prompt, and Flow Control Options") + output_and_prompt_options = p.add_argument_group( + "Output, Prompt, and Flow Control Options" + ) output_and_prompt_options.add_argument( "--debug", action="store_true", @@ -1653,17 +1723,19 @@ def add_parser_json(p): "--json", action="store_true", default=NULL, - help="Report all output as json. Suitable for using conda programmatically." + help="Report all output as json. 
Suitable for using conda programmatically.", ) output_and_prompt_options.add_argument( - "-v", "--verbose", + "-v", + "--verbose", action=NullCountAction, help="Use once for info, twice for debug, three times for trace.", dest="verbosity", default=NULL, ) output_and_prompt_options.add_argument( - '-q', "--quiet", + "-q", + "--quiet", action="store_true", default=NULL, help="Do not display progress bar.", @@ -1672,7 +1744,9 @@ def add_parser_json(p): def add_output_and_prompt_options(p): - output_and_prompt_options = p.add_argument_group("Output, Prompt, and Flow Control Options") + output_and_prompt_options = p.add_argument_group( + "Output, Prompt, and Flow Control Options" + ) output_and_prompt_options.add_argument( "--debug", action="store_true", @@ -1680,7 +1754,8 @@ def add_output_and_prompt_options(p): help=SUPPRESS, ) output_and_prompt_options.add_argument( - "-d", "--dry-run", + "-d", + "--dry-run", action="store_true", help="Only display what would have been done.", ) @@ -1688,27 +1763,30 @@ def add_output_and_prompt_options(p): "--json", action="store_true", default=NULL, - help="Report all output as json. Suitable for using conda programmatically." + help="Report all output as json. Suitable for using conda programmatically.", ) output_and_prompt_options.add_argument( - '-q', "--quiet", + "-q", + "--quiet", action="store_true", default=NULL, help="Do not display progress bar.", ) output_and_prompt_options.add_argument( - "-v", "--verbose", + "-v", + "--verbose", action=NullCountAction, help="Can be used multiple times. Once for INFO, twice for DEBUG, three times for TRACE.", dest="verbosity", default=NULL, ) output_and_prompt_options.add_argument( - "-y", "--yes", + "-y", + "--yes", action="store_true", default=NULL, help="Sets any confirmation values to 'yes' automatically. " - "Users will not be asked to confirm any adding, deleting, backups, etc.", + "Users will not be asked to confirm any adding, deleting, backups, etc.", ) return output_and_prompt_options @@ -1716,17 +1794,20 @@ def add_output_and_prompt_options(p): def add_parser_channels(p): channel_customization_options = p.add_argument_group("Channel Customization") channel_customization_options.add_argument( - '-c', '--channel', - dest='channel', # apparently conda-build uses this; someday rename to channels are remove context.channels alias to channel # NOQA + "-c", + "--channel", + dest="channel", # apparently conda-build uses this; someday rename to channels are remove context.channels alias to channel # NOQA # TODO: if you ever change 'channel' to 'channels', make sure you modify the context.channels property accordingly # NOQA action="append", - help=("Additional channel to search for packages. These are URLs searched in the order " - "they are given (including local directories using the 'file://' syntax or " - "simply a path like '/home/conda/mychan' or '../mychan'). Then, the defaults " - "or channels from .condarc are searched (unless --override-channels is given). " - "You can use 'defaults' to get the default packages for conda. You can also " - "use any name and the .condarc channel_alias value will be prepended. The " - "default channel_alias is https://conda.anaconda.org/.") + help=( + "Additional channel to search for packages. These are URLs searched in the order " + "they are given (including local directories using the 'file://' syntax or " + "simply a path like '/home/conda/mychan' or '../mychan'). Then, the defaults " + "or channels from .condarc are searched (unless --override-channels is given). 
" + "You can use 'defaults' to get the default packages for conda. You can also " + "use any name and the .condarc channel_alias value will be prepended. The " + "default channel_alias is https://conda.anaconda.org/." + ), ) channel_customization_options.add_argument( "--use-local", @@ -1743,14 +1824,16 @@ def add_parser_channels(p): "--repodata-fn", action="append", dest="repodata_fns", - help=("Specify file name of repodata on the remote server where your channels " - "are configured or within local backups. Conda will try whatever you " - "specify, but will ultimately fall back to repodata.json if your specs are " - "not satisfiable with what you specify here. This is used to employ repodata " - "that is smaller and reduced in time scope. You may pass this flag more than " - "once. Leftmost entries are tried first, and the fallback to repodata.json " - "is added for you automatically. For more information, see " - "conda config --describe repodata_fns.") + help=( + "Specify file name of repodata on the remote server where your channels " + "are configured or within local backups. Conda will try whatever you " + "specify, but will ultimately fall back to repodata.json if your specs are " + "not satisfiable with what you specify here. This is used to employ repodata " + "that is smaller and reduced in time scope. You may pass this flag more than " + "once. Leftmost entries are tried first, and the fallback to repodata.json " + "is added for you automatically. For more information, see " + "conda config --describe repodata_fns." + ), ) channel_customization_options.add_argument( "--experimental", @@ -1772,7 +1855,7 @@ def add_parser_solver_mode(p): default=NULL, const="strict", help="Packages in lower priority channels are not considered if a package " - "with the same name appears in a higher priority channel.", + "with the same name appears in a higher priority channel.", ) solver_mode_options.add_argument( "--channel-priority", @@ -1788,7 +1871,7 @@ def add_parser_solver_mode(p): default=NULL, const="disabled", help="Package version takes precedence over channel priority. " - "Overrides the value given by `conda config --show channel_priority`." + "Overrides the value given by `conda config --show channel_priority`.", ) deps_modifiers.add_argument( "--no-deps", @@ -1796,7 +1879,7 @@ def add_parser_solver_mode(p): const=DepsModifier.NO_DEPS, dest="deps_modifier", help="Do not install, update, remove, or change dependencies. This WILL lead " - "to broken environments and inconsistent behavior. Use at your own risk.", + "to broken environments and inconsistent behavior. 
Use at your own risk.", default=NULL, ) deps_modifiers.add_argument( @@ -1810,7 +1893,7 @@ def add_parser_solver_mode(p): solver_mode_options.add_argument( "--no-pin", action="store_true", - dest='ignore_pinned', + dest="ignore_pinned", default=NULL, help="Ignore pinned file.", ) @@ -1820,7 +1903,8 @@ def add_parser_solver_mode(p): def add_parser_update_modifiers(solver_mode_options): update_modifiers = solver_mode_options.add_mutually_exclusive_group() update_modifiers.add_argument( - "--freeze-installed", "--no-update-deps", + "--freeze-installed", + "--no-update-deps", action="store_const", const=UpdateModifier.FREEZE_INSTALLED, dest="update_modifier", @@ -1836,19 +1920,21 @@ def add_parser_update_modifiers(solver_mode_options): help="Update dependencies that have available updates.", ) update_modifiers.add_argument( - "-S", "--satisfied-skip-solve", + "-S", + "--satisfied-skip-solve", action="store_const", const=UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE, dest="update_modifier", default=NULL, help="Exit early and do not run the solver if the requested specs are satisfied. " - "Also skips aggressive updates as configured by the " - "'aggressive_update_packages' config setting. Use " - "'conda info --describe aggressive_update_packages' to view your setting. " - "--satisfied-skip-solve is similar to the default behavior of 'pip install'.", + "Also skips aggressive updates as configured by the " + "'aggressive_update_packages' config setting. Use " + "'conda info --describe aggressive_update_packages' to view your setting. " + "--satisfied-skip-solve is similar to the default behavior of 'pip install'.", ) update_modifiers.add_argument( - "--update-all", "--all", + "--update-all", + "--all", action="store_const", const=UpdateModifier.UPDATE_ALL, dest="update_modifier", @@ -1880,7 +1966,9 @@ def add_parser_solver(p): See ``context.solver`` for more info. """ - solver_choices = [solver.name for solver in context.plugin_manager.get_hook_results("solvers")] + solver_choices = [ + solver.name for solver in context.plugin_manager.get_hook_results("solvers") + ] group = p.add_mutually_exclusive_group() group.add_argument( "--solver", @@ -1902,24 +1990,26 @@ def add_parser_solver(p): def add_parser_networking(p): networking_options = p.add_argument_group("Networking Options") networking_options.add_argument( - "-C", "--use-index-cache", + "-C", + "--use-index-cache", action="store_true", default=False, help="Use cache of channel index files, even if it has expired. This is useful " - "if you don't want conda to check whether a new version of the repodata " - "file exists, which will save bandwidth.", + "if you don't want conda to check whether a new version of the repodata " + "file exists, which will save bandwidth.", ) networking_options.add_argument( - "-k", "--insecure", + "-k", + "--insecure", action="store_false", dest="ssl_verify", default=NULL, - help="Allow conda to perform \"insecure\" SSL connections and transfers. " - "Equivalent to setting 'ssl_verify' to 'false'." + help='Allow conda to perform "insecure" SSL connections and transfers. ' + "Equivalent to setting 'ssl_verify' to 'false'.", ) networking_options.add_argument( "--offline", - action='store_true', + action="store_true", default=NULL, help="Offline mode. 
Don't connect to the Internet.", ) @@ -1927,18 +2017,21 @@ def add_parser_networking(p): def add_parser_package_install_options(p): - package_install_options = p.add_argument_group("Package Linking and Install-time Options") + package_install_options = p.add_argument_group( + "Package Linking and Install-time Options" + ) package_install_options.add_argument( - '-f', "--force", + "-f", + "--force", action="store_true", default=NULL, help=SUPPRESS, ) package_install_options.add_argument( - '--copy', + "--copy", action="store_true", default=NULL, - help="Install all packages using copies instead of hard- or soft-linking." + help="Install all packages using copies instead of hard- or soft-linking.", ) if on_win: package_install_options.add_argument( @@ -1963,13 +2056,14 @@ def add_parser_known(p): "--unknown", action="store_true", default=False, - dest='unknown', + dest="unknown", help=SUPPRESS, ) + def add_parser_default_packages(p): p.add_argument( "--no-default-packages", action="store_true", - help='Ignore create_default_packages in the .condarc file.', + help="Ignore create_default_packages in the .condarc file.", ) diff --git a/conda/cli/find_commands.py b/conda/cli/find_commands.py index 5833099ce3e..b3880c81588 100644 --- a/conda/cli/find_commands.py +++ b/conda/cli/find_commands.py @@ -1,12 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from functools import lru_cache import os -from os.path import basename, expanduser, isdir, isfile, join import re import sys import sysconfig +from functools import lru_cache +from os.path import basename, expanduser, isdir, isfile, join from ..common.compat import on_win @@ -17,22 +16,22 @@ def find_executable(executable, include_others=True): if include_others: from ..utils import sys_prefix_unfollowed + prefixes = [sys_prefix_unfollowed()] if sys.prefix != prefixes[0]: prefixes.append(sys.prefix) - dir_paths = [join(p, basename(sysconfig.get_path('scripts'))) - for p in prefixes] + dir_paths = [join(p, basename(sysconfig.get_path("scripts"))) for p in prefixes] # Is this still needed? if on_win: - dir_paths.append('C:\\cygwin\\bin') + dir_paths.append("C:\\cygwin\\bin") else: dir_paths = [] - dir_paths.extend(os.environ.get('PATH', '').split(os.pathsep)) + dir_paths.extend(os.environ.get("PATH", "").split(os.pathsep)) for dir_path in dir_paths: if on_win: - for ext in ('.exe', '.bat', ''): + for ext in (".exe", ".bat", ""): path = join(dir_path, executable + ext) if isfile(path): return path @@ -45,24 +44,23 @@ def find_executable(executable, include_others=True): @lru_cache(maxsize=None) def find_commands(include_others=True): - if include_others: from ..utils import sys_prefix_unfollowed + prefixes = [sys_prefix_unfollowed()] if sys.prefix != prefixes[0]: prefixes.append(sys.prefix) - dir_paths = [join(p, basename(sysconfig.get_path('scripts'))) - for p in prefixes] + dir_paths = [join(p, basename(sysconfig.get_path("scripts"))) for p in prefixes] # Is this still needed? 
if on_win: - dir_paths.append('C:\\cygwin\\bin') + dir_paths.append("C:\\cygwin\\bin") else: dir_paths = [] if on_win: - pat = re.compile(r'conda-([\w\-]+)\.(exe|bat)$') + pat = re.compile(r"conda-([\w\-]+)\.(exe|bat)$") else: - pat = re.compile(r'conda-([\w\-]+)$') + pat = re.compile(r"conda-([\w\-]+)$") res = set() for dir_path in dir_paths: diff --git a/conda/cli/install.py b/conda/cli/install.py index e311cf950a6..9418f64cc71 100644 --- a/conda/cli/install.py +++ b/conda/cli/install.py @@ -1,35 +1,45 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from logging import getLogger import os +from logging import getLogger from os.path import abspath, basename, exists, isdir, isfile, join -from . import common -from .common import check_non_admin from .. import CondaError from ..auxlib.ish import dals -from ..base.constants import ROOT_ENV_NAME, DepsModifier, UpdateModifier, REPODATA_FN +from ..base.constants import REPODATA_FN, ROOT_ENV_NAME, DepsModifier, UpdateModifier from ..base.context import context, locate_prefix_by_name from ..common.constants import NULL -from ..common.path import paths_equal, is_package_file +from ..common.path import is_package_file, paths_equal from ..core.index import calculate_channel_urls, get_index from ..core.prefix_data import PrefixData -from ..exceptions import (CondaExitZero, CondaImportError, CondaOSError, CondaSystemExit, - CondaValueError, DirectoryNotACondaEnvironmentError, - DirectoryNotFoundError, DryRunExit, EnvironmentLocationNotFound, - NoBaseEnvironmentError, PackageNotInstalledError, PackagesNotFoundError, - TooManyArgumentsError, UnsatisfiableError, - SpecsConfigurationConflictError) +from ..exceptions import ( + CondaExitZero, + CondaImportError, + CondaOSError, + CondaSystemExit, + CondaValueError, + DirectoryNotACondaEnvironmentError, + DirectoryNotFoundError, + DryRunExit, + EnvironmentLocationNotFound, + NoBaseEnvironmentError, + PackageNotInstalledError, + PackagesNotFoundError, + SpecsConfigurationConflictError, + TooManyArgumentsError, + UnsatisfiableError, +) from ..gateways.disk.create import mkdir_p from ..gateways.disk.delete import delete_trash, path_is_clean from ..misc import clone_env, explicit, touch_nonadmin from ..models.match_spec import MatchSpec from ..plan import revert_actions from ..resolve import ResolvePackageNotFound +from . 
import common +from .common import check_non_admin log = getLogger(__name__) -stderrlog = getLogger('conda.stderr') +stderrlog = getLogger("conda.stderr") def check_prefix(prefix, json=False): @@ -38,14 +48,16 @@ def check_prefix(prefix, json=False): if name == ROOT_ENV_NAME: error = "'%s' is a reserved environment name" % name if exists(prefix): - if isdir(prefix) and 'conda-meta' not in tuple(entry.name for entry in os.scandir(prefix)): + if isdir(prefix) and "conda-meta" not in tuple( + entry.name for entry in os.scandir(prefix) + ): return None error = "prefix already exists: %s" % prefix if error: raise CondaValueError(error, json) - if ' ' in prefix: + if " " in prefix: stderrlog.warning( "WARNING: A space was detected in your requested environment path:\n" f"'{prefix}'\n" @@ -66,17 +78,16 @@ def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None): print("Source: %s" % src_prefix) print("Destination: %s" % dst_prefix) - actions, untracked_files = clone_env(src_prefix, dst_prefix, - verbose=not json, - quiet=quiet, - index_args=index_args) + actions, untracked_files = clone_env( + src_prefix, dst_prefix, verbose=not json, quiet=quiet, index_args=index_args + ) if json: common.stdout_json_success( actions=actions, untracked_files=list(untracked_files), src_prefix=src_prefix, - dst_prefix=dst_prefix + dst_prefix=dst_prefix, ) @@ -84,7 +95,8 @@ def print_activate(env_name_or_prefix): # pragma: no cover if not context.quiet and not context.json: if " " in env_name_or_prefix: env_name_or_prefix = f'"{env_name_or_prefix}"' - message = dals(f""" + message = dals( + f""" # # To activate this environment, use # @@ -93,7 +105,8 @@ def print_activate(env_name_or_prefix): # pragma: no cover # To deactivate an active environment, use # # $ conda deactivate - """) + """ + ) print(message) # TODO: use logger @@ -104,7 +117,7 @@ def get_revision(arg, json=False): raise CondaValueError("expected revision number, not: '%s'" % arg, json) -def install(args, parser, command='install'): +def install(args, parser, command="install"): """ conda install, conda update, and conda create """ @@ -114,27 +127,32 @@ def install(args, parser, command='install'): # because it deduplicates records that exist as both formats. 
Forcing this to # repodata.json ensures that .tar.bz2 files are available if context.use_only_tar_bz2: - args.repodata_fns = ('repodata.json', ) + args.repodata_fns = ("repodata.json",) - newenv = bool(command == 'create') - isupdate = bool(command == 'update') - isinstall = bool(command == 'install') - isremove = bool(command == 'remove') + newenv = bool(command == "create") + isupdate = bool(command == "update") + isinstall = bool(command == "install") + isremove = bool(command == "remove") prefix = context.target_prefix if newenv: check_prefix(prefix, json=context.json) if context.force_32bit and prefix == context.root_prefix: raise CondaValueError("cannot use CONDA_FORCE_32BIT=1 in base env") - if isupdate and not (args.file or args.packages - or context.update_modifier == UpdateModifier.UPDATE_ALL): - raise CondaValueError("""no package names supplied + if isupdate and not ( + args.file + or args.packages + or context.update_modifier == UpdateModifier.UPDATE_ALL + ): + raise CondaValueError( + """no package names supplied # Example: conda update -n myenv scipy -""") +""" + ) if not newenv: if isdir(prefix): delete_trash(prefix) - if not isfile(join(prefix, 'conda-meta', 'history')): + if not isfile(join(prefix, "conda-meta", "history")): if paths_equal(prefix, context.conda_prefix): raise NoBaseEnvironmentError() else: @@ -148,26 +166,30 @@ def install(args, parser, command='install'): try: mkdir_p(prefix) except OSError as e: - raise CondaOSError("Could not create directory: %s" % prefix, caused_by=e) + raise CondaOSError( + "Could not create directory: %s" % prefix, caused_by=e + ) else: raise EnvironmentLocationNotFound(prefix) - args_packages = [s.strip('"\'') for s in args.packages] + args_packages = [s.strip("\"'") for s in args.packages] if newenv and not args.no_default_packages: # Override defaults if they are specified at the command line # TODO: rework in 4.4 branch using MatchSpec - args_packages_names = [pkg.replace(' ', '=').split('=', 1)[0] for pkg in args_packages] + args_packages_names = [ + pkg.replace(" ", "=").split("=", 1)[0] for pkg in args_packages + ] for default_pkg in context.create_default_packages: - default_pkg_name = default_pkg.replace(' ', '=').split('=', 1)[0] + default_pkg_name = default_pkg.replace(" ", "=").split("=", 1)[0] if default_pkg_name not in args_packages_names: args_packages.append(default_pkg) index_args = { - 'use_cache': args.use_index_cache, - 'channel_urls': context.channels, - 'unknown': args.unknown, - 'prepend': not args.override_channels, - 'use_local': args.use_local + "use_cache": args.use_index_cache, + "channel_urls": context.channels, + "unknown": args.unknown, + "prepend": not args.override_channels, + "use_local": args.use_local, } num_cp = sum(is_package_file(s) for s in args_packages) @@ -176,8 +198,9 @@ def install(args, parser, command='install'): explicit(args_packages, prefix, verbose=not context.quiet) return else: - raise CondaValueError("cannot mix specifications with conda package" - " filenames") + raise CondaValueError( + "cannot mix specifications with conda package" " filenames" + ) specs = [] if args.file: @@ -185,9 +208,11 @@ def install(args, parser, command='install'): try: specs.extend(common.specs_from_url(fpath, json=context.json)) except UnicodeError: - raise CondaError("Error reading file, file should be a text file containing" - " packages \nconda create --help for details") - if '@EXPLICIT' in specs: + raise CondaError( + "Error reading file, file should be a text file containing" + " packages 
\nconda create --help for details" + ) + if "@EXPLICIT" in specs: explicit(specs, prefix, verbose=not context.quiet, index_args=index_args) return specs.extend(common.specs_from_args(args_packages, json=context.json)) @@ -195,8 +220,9 @@ def install(args, parser, command='install'): if isinstall and args.revision: get_revision(args.revision, json=context.json) elif isinstall and not (args.file or args_packages): - raise CondaValueError("too few arguments, " - "must supply command line package specs or --file") + raise CondaValueError( + "too few arguments, " "must supply command line package specs or --file" + ) # for 'conda update', make sure the requested specs actually exist in the prefix # and that they are name-only specs @@ -205,17 +231,29 @@ def install(args, parser, command='install'): for spec in specs: spec = MatchSpec(spec) if not spec.is_name_only_spec: - raise CondaError("Invalid spec for 'conda update': %s\n" - "Use 'conda install' instead." % spec) + raise CondaError( + "Invalid spec for 'conda update': %s\n" + "Use 'conda install' instead." % spec + ) if not prefix_data.get(spec.name, None): raise PackageNotInstalledError(prefix, spec.name) if newenv and args.clone: if args.packages: - raise TooManyArgumentsError(0, len(args.packages), list(args.packages), - 'did not expect any arguments for --clone') - - clone(args.clone, prefix, json=context.json, quiet=context.quiet, index_args=index_args) + raise TooManyArgumentsError( + 0, + len(args.packages), + list(args.packages), + "did not expect any arguments for --clone", + ) + + clone( + args.clone, + prefix, + json=context.json, + quiet=context.quiet, + index_args=index_args, + ) touch_nonadmin(prefix) print_activate(args.name or prefix) return @@ -226,24 +264,33 @@ def install(args, parser, command='install'): if REPODATA_FN not in repodata_fns: repodata_fns.append(REPODATA_FN) - args_set_update_modifier = hasattr(args, "update_modifier") and args.update_modifier != NULL + args_set_update_modifier = ( + hasattr(args, "update_modifier") and args.update_modifier != NULL + ) # This helps us differentiate between an update, the --freeze-installed option, and the retry # behavior in our initial fast frozen solve - _should_retry_unfrozen = (not args_set_update_modifier or args.update_modifier not in ( - UpdateModifier.FREEZE_INSTALLED, - UpdateModifier.UPDATE_SPECS)) and not newenv + _should_retry_unfrozen = ( + not args_set_update_modifier + or args.update_modifier + not in (UpdateModifier.FREEZE_INSTALLED, UpdateModifier.UPDATE_SPECS) + ) and not newenv for repodata_fn in repodata_fns: try: if isinstall and args.revision: - index = get_index(channel_urls=index_args['channel_urls'], - prepend=index_args['prepend'], platform=None, - use_local=index_args['use_local'], - use_cache=index_args['use_cache'], - unknown=index_args['unknown'], prefix=prefix, - repodata_fn=repodata_fn) - unlink_link_transaction = revert_actions(prefix, get_revision(args.revision), - index) + index = get_index( + channel_urls=index_args["channel_urls"], + prepend=index_args["prepend"], + platform=None, + use_local=index_args["use_local"], + use_cache=index_args["use_cache"], + unknown=index_args["unknown"], + prefix=prefix, + repodata_fn=repodata_fn, + ) + unlink_link_transaction = revert_actions( + prefix, get_revision(args.revision), index + ) else: solver_backend = context.plugin_manager.get_cached_solver_backend() solver = solver_backend( @@ -265,7 +312,9 @@ def install(args, parser, command='install'): deps_modifier=deps_modifier, 
update_modifier=update_modifier, force_reinstall=context.force_reinstall or context.force, - should_retry_solve=(_should_retry_unfrozen or repodata_fn != repodata_fns[-1]), + should_retry_solve=( + _should_retry_unfrozen or repodata_fn != repodata_fns[-1] + ), ) # we only need one of these to work. If we haven't raised an exception, # we're good. @@ -281,12 +330,14 @@ def install(args, parser, command='install'): if isinstance(e, PackagesNotFoundError): raise e else: - channels_urls = tuple(calculate_channel_urls( - channel_urls=index_args['channel_urls'], - prepend=index_args['prepend'], - platform=None, - use_local=index_args['use_local'], - )) + channels_urls = tuple( + calculate_channel_urls( + channel_urls=index_args["channel_urls"], + prepend=index_args["prepend"], + platform=None, + use_local=index_args["use_local"], + ) + ) # convert the ResolvePackageNotFound into PackagesNotFoundError raise PackagesNotFoundError(e._formatted_chains, channels_urls) @@ -304,7 +355,7 @@ def install(args, parser, command='install'): # https://github.com/conda-incubator/conda-libmamba-solver/blob/7c698209/conda_libmamba_solver/solver.py#L617 raise e # Quick solve with frozen env or trimmed repodata failed. Try again without that. - if not hasattr(args, 'update_modifier'): + if not hasattr(args, "update_modifier"): if repodata_fn == repodata_fns[-1]: raise e elif _should_retry_unfrozen: @@ -315,9 +366,13 @@ def install(args, parser, command='install'): force_reinstall=context.force_reinstall or context.force, should_retry_solve=(repodata_fn != repodata_fns[-1]), ) - except (UnsatisfiableError, SystemExit, SpecsConfigurationConflictError) as e: + except ( + UnsatisfiableError, + SystemExit, + SpecsConfigurationConflictError, + ) as e: # Unsatisfiable package specifications/no such revision/import error - if e.args and 'could not import' in e.args[0]: + if e.args and "could not import" in e.args[0]: raise CondaImportError(str(e)) # we want to fall through without raising if we're not at the end of the list # of fns. That way, we fall to the next fn. @@ -328,7 +383,7 @@ def install(args, parser, command='install'): else: # end of the line. Raise the exception # Unsatisfiable package specifications/no such revision/import error - if e.args and 'could not import' in e.args[0]: + if e.args and "could not import" in e.args[0]: raise CondaImportError(str(e)) raise e handle_txn(unlink_link_transaction, prefix, args, newenv) @@ -341,9 +396,11 @@ def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False): raise PackagesNotFoundError(args.package_names) elif not newenv: if context.json: - common.stdout_json_success(message='All requested packages already installed.') + common.stdout_json_success( + message="All requested packages already installed." + ) else: - print('\n# All requested packages already installed.\n') + print("\n# All requested packages already installed.\n") return if not context.json: @@ -358,12 +415,14 @@ def handle_txn(unlink_link_transaction, prefix, args, newenv, remove_op=False): try: unlink_link_transaction.download_and_extract() if context.download_only: - raise CondaExitZero('Package caches prepared. UnlinkLinkTransaction cancelled with ' - '--download-only option.') + raise CondaExitZero( + "Package caches prepared. UnlinkLinkTransaction cancelled with " + "--download-only option." 
+ ) unlink_link_transaction.execute() except SystemExit as e: - raise CondaSystemExit('Exiting', e) + raise CondaSystemExit("Exiting", e) if newenv: touch_nonadmin(prefix) diff --git a/conda/cli/main.py b/conda/cli/main.py index 159d13fc174..21bdd5a6bee 100644 --- a/conda/cli/main.py +++ b/conda/cli/main.py @@ -31,18 +31,20 @@ conda -h """ -from .conda_argparse import generate_parser - import sys +from .conda_argparse import generate_parser + def init_loggers(context=None): from logging import CRITICAL, getLogger + from ..gateways.logging import initialize_logging, set_verbosity + initialize_logging() if context and context.json: # Silence logging info to avoid interfering with JSON output - for logger in ('conda.stdout.verbose', 'conda.stdoutlog', 'conda.stderrlog'): + for logger in ("conda.stdout.verbose", "conda.stdoutlog", "conda.stderrlog"): getLogger(logger).setLevel(CRITICAL + 1) if context: @@ -58,6 +60,7 @@ def main_subshell(*args, post_parse_hook=None, **kwargs): args = p.parse_args(args) from ..base.context import context + context.__init__(argparse_args=args) init_loggers(context) @@ -66,10 +69,11 @@ def main_subshell(*args, post_parse_hook=None, **kwargs): post_parse_hook(args, p) from .conda_argparse import do_call + exit_code = do_call(args, p) if isinstance(exit_code, int): return exit_code - elif hasattr(exit_code, 'rc'): + elif hasattr(exit_code, "rc"): return exit_code.rc @@ -88,6 +92,7 @@ def main_sourced(shell, *args, **kwargs): activator_cls = _build_activator_cls(shell) except KeyError: from ..exceptions import CondaError + raise CondaError("%s is not a supported shell." % shell) activator = activator_cls(args) diff --git a/conda/cli/main_clean.py b/conda/cli/main_clean.py index 4c9fc9a0d35..03157117cf3 100644 --- a/conda/cli/main_clean.py +++ b/conda/cli/main_clean.py @@ -1,13 +1,16 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import sys from logging import getLogger from os import lstat, walk from os.path import isdir, join from typing import Any, Dict, Iterable, List, Tuple -import sys -from ..base.constants import CONDA_PACKAGE_EXTENSIONS, CONDA_TEMP_EXTENSIONS, CONDA_LOGS_DIR +from ..base.constants import ( + CONDA_LOGS_DIR, + CONDA_PACKAGE_EXTENSIONS, + CONDA_TEMP_EXTENSIONS, +) from ..base.context import context log = getLogger(__name__) @@ -55,6 +58,7 @@ def _rm_rf(*parts: str, verbose: bool, verbosity: bool) -> None: else: log.info("%r", e) + def find_tarballs() -> Dict[str, Any]: warnings: List[Tuple[str, Exception]] = [] pkg_sizes: Dict[str, Dict[str, int]] = {} @@ -124,8 +128,8 @@ def rm_pkgs( dry_run: bool, name: str, ) -> None: - from .common import confirm_yn from ..utils import human_bytes + from .common import confirm_yn if verbose and warnings: for fn, exception in warnings: @@ -175,7 +179,9 @@ def find_index_cache() -> List[str]: def find_pkgs_dirs() -> List[str]: from ..core.package_cache_data import PackageCacheData - return [pc.pkgs_dir for pc in PackageCacheData.writable_caches() if isdir(pc.pkgs_dir)] + return [ + pc.pkgs_dir for pc in PackageCacheData.writable_caches() if isdir(pc.pkgs_dir) + ] def find_tempfiles(paths: Iterable[str]) -> List[str]: @@ -267,7 +273,9 @@ def _execute(args, parser): ): from ..exceptions import ArgumentError - raise ArgumentError("At least one removal target must be given. See 'conda clean --help'.") + raise ArgumentError( + "At least one removal target must be given. See 'conda clean --help'." 
+ ) if args.tarballs or args.all: json_result["tarballs"] = tars = find_tarballs() @@ -295,6 +303,7 @@ def _execute(args, parser): def execute(args, parser): from .common import stdout_json + json_result = _execute(args, parser) if context.json: stdout_json(json_result) diff --git a/conda/cli/main_compare.py b/conda/cli/main_compare.py index 8e98d576220..48c19a3fe94 100644 --- a/conda/cli/main_compare.py +++ b/conda/cli/main_compare.py @@ -1,34 +1,40 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import logging import os from os.path import abspath, expanduser, expandvars -from .common import stdout_json +from conda_env import specs + from ..base.context import context from ..core.prefix_data import PrefixData from ..exceptions import EnvironmentLocationNotFound, SpecNotFound from ..gateways.connection.session import CONDA_SESSION_SCHEMES from ..gateways.disk.test import is_conda_environment from ..models.match_spec import MatchSpec -from conda_env import specs +from .common import stdout_json log = logging.getLogger(__name__) + def get_packages(prefix): if not os.path.isdir(prefix): raise EnvironmentLocationNotFound(prefix) - return sorted(PrefixData(prefix, pip_interop_enabled=True).iter_records(), - key=lambda x: x.name) + return sorted( + PrefixData(prefix, pip_interop_enabled=True).iter_records(), + key=lambda x: x.name, + ) + def _get_name_tuple(pkg): return pkg.name, pkg + def _to_str(pkg): return f"{pkg.name}=={pkg.version}={pkg.build}" + def compare_packages(active_pkgs, specification_pkgs): output = [] res = 0 @@ -39,19 +45,25 @@ def compare_packages(active_pkgs, specification_pkgs): if name in active_pkgs: if not pkg_spec.match(active_pkgs[name]): ok = False - output.append("{} found but mismatch. Specification pkg: {}, Running pkg: {}" - .format(name, pkg, _to_str(active_pkgs[name]))) + output.append( + "{} found but mismatch. Specification pkg: {}, Running pkg: {}".format( + name, pkg, _to_str(active_pkgs[name]) + ) + ) else: ok = False output.append(f"{name} not found") if ok: - output.append("Success. All the packages in the \ + output.append( + "Success. All the packages in the \ specification file are present in the environment \ -with matching version and build string.") +with matching version and build string." 
+ ) else: res = 1 return res, output + def execute(args, parser): prefix = context.target_prefix if not is_conda_environment(prefix): @@ -74,16 +86,16 @@ def execute(args, parser): active_pkgs = dict(map(_get_name_tuple, get_packages(prefix))) specification_pkgs = [] - if 'conda' in env.dependencies: - specification_pkgs = specification_pkgs + env.dependencies['conda'] - if 'pip' in env.dependencies: - specification_pkgs = specification_pkgs + env.dependencies['pip'] + if "conda" in env.dependencies: + specification_pkgs = specification_pkgs + env.dependencies["conda"] + if "pip" in env.dependencies: + specification_pkgs = specification_pkgs + env.dependencies["pip"] exitcode, output = compare_packages(active_pkgs, specification_pkgs) if context.json: stdout_json(output) else: - print('\n'.join(map(str, output))) + print("\n".join(map(str, output))) return exitcode diff --git a/conda/cli/main_config.py b/conda/cli/main_config.py index 8f13ad6feb3..ba96514fdd2 100644 --- a/conda/cli/main_config.py +++ b/conda/cli/main_config.py @@ -1,21 +1,26 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from collections.abc import Mapping, Sequence import json +import os +import sys +from collections.abc import Mapping, Sequence from itertools import chain from logging import getLogger -import os from os.path import isfile, join -import sys from textwrap import wrap from conda.common.iterators import groupby_to_dict as groupby from .. import CondaError from ..auxlib.entity import EntityEncoder -from ..base.constants import (ChannelPriority, DepsModifier, PathConflict, SafetyChecks, - UpdateModifier, SatSolverChoice) +from ..base.constants import ( + ChannelPriority, + DepsModifier, + PathConflict, + SafetyChecks, + SatSolverChoice, + UpdateModifier, +) from ..base.context import context, sys_rc_path, user_rc_path from ..common.compat import isiterable from ..common.configuration import pretty_list, pretty_map @@ -25,6 +30,7 @@ def execute(args, parser): from ..exceptions import CouldntParseError + try: execute_config(args, parser) except (CouldntParseError, NotImplementedError) as e: @@ -54,40 +60,46 @@ def format_dict(d): def parameter_description_builder(name): builder = [] details = context.describe_parameter(name) - aliases = details['aliases'] - string_delimiter = details.get('string_delimiter') - element_types = details['element_types'] - default_value_str = json.dumps(details['default_value'], cls=EntityEncoder) + aliases = details["aliases"] + string_delimiter = details.get("string_delimiter") + element_types = details["element_types"] + default_value_str = json.dumps(details["default_value"], cls=EntityEncoder) if details["parameter_type"] == "primitive": - builder.append("{} ({})".format(name, ", ".join(sorted({et for et in element_types})))) + builder.append( + "{} ({})".format(name, ", ".join(sorted({et for et in element_types}))) + ) else: builder.append( "{} ({}: {})".format( - name, details["parameter_type"], ", ".join(sorted({et for et in element_types})) + name, + details["parameter_type"], + ", ".join(sorted({et for et in element_types})), ) ) if aliases: - builder.append(" aliases: %s" % ', '.join(aliases)) + builder.append(" aliases: %s" % ", ".join(aliases)) if string_delimiter: builder.append(" env var string delimiter: '%s'" % string_delimiter) - builder.extend(' ' + line for line in wrap(details['description'], 70)) + builder.extend(" " + line for line in wrap(details["description"], 70)) - builder.append('') - builder = ['# ' + line for line in 
builder] + builder.append("") + builder = ["# " + line for line in builder] - builder.extend(yaml_round_trip_dump({name: json.loads(default_value_str)}).strip().split('\n')) + builder.extend( + yaml_round_trip_dump({name: json.loads(default_value_str)}).strip().split("\n") + ) - builder = ['# ' + line for line in builder] - builder.append('') + builder = ["# " + line for line in builder] + builder.append("") return builder def describe_all_parameters(): builder = [] - skip_categories = ('CLI-only', 'Hidden and Undocumented') + skip_categories = ("CLI-only", "Hidden and Undocumented") for category, parameter_names in context.category_map.items(): if category in skip_categories: continue @@ -96,7 +108,9 @@ def describe_all_parameters(): builder.append("# ######################################################") builder.append("") builder.extend( - chain.from_iterable(parameter_description_builder(name) for name in parameter_names) + chain.from_iterable( + parameter_description_builder(name) for name in parameter_names + ) ) builder.append("") return "\n".join(builder) @@ -115,11 +129,17 @@ def print_config_item(key, value): # recreate the same file. numitems = len(value) for q, item in enumerate(reversed(value)): - if key == "channels" and q in (0, numitems-1): - stdout_write(" ".join(( - "--add", key, repr(item), - " # lowest priority" if q == 0 else " # highest priority" - ))) + if key == "channels" and q in (0, numitems - 1): + stdout_write( + " ".join( + ( + "--add", + key, + repr(item), + " # lowest priority" if q == 0 else " # highest priority", + ) + ) + ) else: stdout_write(" ".join(("--add", key, repr(item)))) @@ -132,17 +152,22 @@ def execute_config(args, parser): if args.show_sources: if context.json: - stdout_write(json.dumps( - context.collect_all(), sort_keys=True, indent=2, separators=(',', ': '), - cls=EntityEncoder - )) + stdout_write( + json.dumps( + context.collect_all(), + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) + ) else: lines = [] for source, reprs in context.collect_all().items(): lines.append("==> %s <==" % source) lines.extend(format_dict(reprs)) - lines.append('') - stdout_write('\n'.join(lines)) + lines.append("") + stdout_write("\n".join(lines)) return if args.show is not None: @@ -151,37 +176,48 @@ def execute_config(args, parser): all_names = context.list_parameters() not_params = set(paramater_names) - set(all_names) if not_params: - from ..exceptions import ArgumentError from ..common.io import dashlist - raise ArgumentError("Invalid configuration parameters: %s" % dashlist(not_params)) + from ..exceptions import ArgumentError + + raise ArgumentError( + "Invalid configuration parameters: %s" % dashlist(not_params) + ) else: paramater_names = context.list_parameters() d = {key: getattr(context, key) for key in paramater_names} if context.json: - stdout_write(json.dumps( - d, sort_keys=True, indent=2, separators=(',', ': '), cls=EntityEncoder - )) + stdout_write( + json.dumps( + d, + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) + ) else: # Add in custom formatting - if 'custom_channels' in d: - d['custom_channels'] = { + if "custom_channels" in d: + d["custom_channels"] = { channel.name: f"{channel.scheme}://{channel.location}" - for channel in d['custom_channels'].values() + for channel in d["custom_channels"].values() } - if 'custom_multichannels' in d: + if "custom_multichannels" in d: from ..common.io import dashlist - d['custom_multichannels'] = { + + d["custom_multichannels"] = { 
multichannel_name: dashlist(channels, indent=4) - for multichannel_name, channels in d['custom_multichannels'].items() + for multichannel_name, channels in d["custom_multichannels"].items() } if "channel_settings" in d: ident = " " * 4 d["channel_settings"] = tuple( - f"\n{ident}".join(format_dict(mapping)) for mapping in d["channel_settings"] + f"\n{ident}".join(format_dict(mapping)) + for mapping in d["channel_settings"] ) - stdout_write('\n'.join(format_dict(d))) + stdout_write("\n".join(format_dict(d))) context.validate_configuration() return @@ -191,14 +227,22 @@ def execute_config(args, parser): all_names = context.list_parameters() not_params = set(paramater_names) - set(all_names) if not_params: - from ..exceptions import ArgumentError from ..common.io import dashlist - raise ArgumentError("Invalid configuration parameters: %s" % dashlist(not_params)) + from ..exceptions import ArgumentError + + raise ArgumentError( + "Invalid configuration parameters: %s" % dashlist(not_params) + ) if context.json: - stdout_write(json.dumps( - [context.describe_parameter(name) for name in paramater_names], - sort_keys=True, indent=2, separators=(',', ': '), cls=EntityEncoder - )) + stdout_write( + json.dumps( + [context.describe_parameter(name) for name in paramater_names], + sort_keys=True, + indent=2, + separators=(",", ": "), + cls=EntityEncoder, + ) + ) else: builder = [] builder.extend( @@ -237,8 +281,8 @@ def execute_config(args, parser): if args.system: rc_path = sys_rc_path elif args.env: - if 'CONDA_PREFIX' in os.environ: - rc_path = join(os.environ['CONDA_PREFIX'], '.condarc') + if "CONDA_PREFIX" in os.environ: + rc_path = join(os.environ["CONDA_PREFIX"], ".condarc") else: rc_path = user_rc_path elif args.file: @@ -251,13 +295,15 @@ def execute_config(args, parser): with open(rc_path) as fh: data = fh.read().strip() if data: - raise CondaError("The file '%s' " - "already contains configuration information.\n" - "Remove the file to proceed.\n" - "Use `conda config --describe` to display default configuration." - % rc_path) + raise CondaError( + "The file '%s' " + "already contains configuration information.\n" + "Remove the file to proceed.\n" + "Use `conda config --describe` to display default configuration." + % rc_path + ) - with open(rc_path, 'w') as fh: + with open(rc_path, "w") as fh: fh.write(describe_all_parameters()) return @@ -273,11 +319,13 @@ def execute_config(args, parser): else: rc_config = {} - grouped_paramaters = groupby(lambda p: context.describe_parameter(p)['parameter_type'], - context.list_parameters()) - primitive_parameters = grouped_paramaters['primitive'] - sequence_parameters = grouped_paramaters['sequence'] - map_parameters = grouped_paramaters['map'] + grouped_paramaters = groupby( + lambda p: context.describe_parameter(p)["parameter_type"], + context.list_parameters(), + ) + primitive_parameters = grouped_paramaters["primitive"] + sequence_parameters = grouped_paramaters["sequence"] + map_parameters = grouped_paramaters["map"] all_parameters = primitive_parameters + sequence_parameters + map_parameters # Get @@ -323,31 +371,36 @@ def execute_config(args, parser): rc_config.update(parsed) except Exception: # pragma: no cover from ..exceptions import ParseError + raise ParseError("invalid yaml content:\n%s" % content) # prepend, append, add for arg, prepend in zip((args.prepend, args.append), (True, False)): for key, item in arg: - key, subkey = key.split('.', 1) if '.' 
in key else (key, None) - if key == 'channels' and key not in rc_config: - rc_config[key] = ['defaults'] + key, subkey = key.split(".", 1) if "." in key else (key, None) + if key == "channels" and key not in rc_config: + rc_config[key] = ["defaults"] if key in sequence_parameters: arglist = rc_config.setdefault(key, []) elif key in map_parameters: arglist = rc_config.setdefault(key, {}).setdefault(subkey, []) else: from ..exceptions import CondaValueError - raise CondaValueError("Key '%s' is not a known sequence parameter." % key) - if not (isinstance(arglist, Sequence) and not - isinstance(arglist, str)): + + raise CondaValueError( + "Key '%s' is not a known sequence parameter." % key + ) + if not (isinstance(arglist, Sequence) and not isinstance(arglist, str)): from ..exceptions import CouldntParseError + bad = rc_config[key].__class__.__name__ raise CouldntParseError(f"key {key!r} should be a list, not {bad}.") if item in arglist: message_key = key + "." + subkey if subkey is not None else key # Right now, all list keys should not contain duplicates message = "Warning: '{}' already in '{}' list, moving to the {}".format( - item, message_key, "top" if prepend else "bottom") + item, message_key, "top" if prepend else "bottom" + ) if subkey is None: arglist = rc_config[key] = [p for p in arglist if p != item] else: @@ -360,7 +413,7 @@ def execute_config(args, parser): # Set for key, item in args.set: - key, subkey = key.split('.', 1) if '.' in key else (key, None) + key, subkey = key.split(".", 1) if "." in key else (key, None) if key in primitive_parameters: value = context.typify_parameter(key, item, "--set parameter") rc_config[key] = value @@ -369,34 +422,37 @@ def execute_config(args, parser): argmap[subkey] = item else: from ..exceptions import CondaValueError + raise CondaValueError("Key '%s' is not a known primitive parameter." % key) # Remove for key, item in args.remove: - key, subkey = key.split('.', 1) if '.' in key else (key, None) + key, subkey = key.split(".", 1) if "." in key else (key, None) if key not in rc_config: - if key != 'channels': + if key != "channels": from ..exceptions import CondaKeyError + raise CondaKeyError(key, "key %r is not in the config file" % key) - rc_config[key] = ['defaults'] + rc_config[key] = ["defaults"] if item not in rc_config[key]: from ..exceptions import CondaKeyError - raise CondaKeyError(key, "%r is not in the %r key of the config file" % - (item, key)) + + raise CondaKeyError( + key, f"{item!r} is not in the {key!r} key of the config file" + ) rc_config[key] = [i for i in rc_config[key] if i != item] # Remove Key - for key, in args.remove_key: - key, subkey = key.split('.', 1) if '.' in key else (key, None) + for (key,) in args.remove_key: + key, subkey = key.split(".", 1) if "." in key else (key, None) if key not in rc_config: from ..exceptions import CondaKeyError - raise CondaKeyError(key, "key %r is not in the config file" % - key) + + raise CondaKeyError(key, "key %r is not in the config file" % key) del rc_config[key] # config.rc_keys if not args.get: - # Add representers for enums. 
# Because a representer cannot be added for the base Enum class (it must be added for # each specific Enum subclass - and because of import rules), I don't know of a better @@ -404,24 +460,34 @@ def execute_config(args, parser): def enum_representer(dumper, data): return dumper.represent_str(str(data)) - yaml.representer.RoundTripRepresenter.add_representer(SafetyChecks, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(PathConflict, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(DepsModifier, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(UpdateModifier, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(ChannelPriority, enum_representer) - yaml.representer.RoundTripRepresenter.add_representer(SatSolverChoice, enum_representer) + yaml.representer.RoundTripRepresenter.add_representer( + SafetyChecks, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + PathConflict, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + DepsModifier, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + UpdateModifier, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + ChannelPriority, enum_representer + ) + yaml.representer.RoundTripRepresenter.add_representer( + SatSolverChoice, enum_representer + ) try: - with open(rc_path, 'w') as rc: + with open(rc_path, "w") as rc: rc.write(yaml_round_trip_dump(rc_config)) except OSError as e: - raise CondaError('Cannot write to condarc file at %s\n' - 'Caused by %r' % (rc_path, e)) + raise CondaError( + "Cannot write to condarc file at %s\n" "Caused by %r" % (rc_path, e) + ) if context.json: from .common import stdout_json_success - stdout_json_success( - rc_path=rc_path, - warnings=json_warnings, - get=json_get - ) + + stdout_json_success(rc_path=rc_path, warnings=json_warnings, get=json_get) diff --git a/conda/cli/main_create.py b/conda/cli/main_create.py index dda78e59647..668a4c5b056 100644 --- a/conda/cli/main_create.py +++ b/conda/cli/main_create.py @@ -1,17 +1,16 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger from os.path import isdir -from .common import confirm_yn -from .install import install from ..base.context import context from ..common.path import paths_equal from ..exceptions import CondaValueError from ..gateways.disk.delete import rm_rf from ..gateways.disk.test import is_conda_environment from ..notices import notices +from .common import confirm_yn +from .install import install log = getLogger(__name__) @@ -24,18 +23,24 @@ def execute(args, parser): if context.dry_run: # Taking the "easy" way out, rather than trying to fake removing # the existing environment before creating a new one. 
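# Aside: the guard that follows, reduced to a stdlib-only sketch. The names
# guard_existing_env and confirm are hypothetical stand-ins for conda's
# context / confirm_yn machinery: a dry run cannot model removing the existing
# environment first, so it is rejected outright, while a real run asks for
# confirmation (defaulting to "no") before deleting and recreating.
def guard_existing_env(prefix: str, dry_run: bool, confirm) -> None:
    if dry_run:
        raise ValueError(f"cannot dry-run a create over the existing env at {prefix}")
    if not confirm(f"A conda environment already exists at {prefix!r}. Remove it?"):
        raise SystemExit(0)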
- raise CondaValueError("Cannot `create --dry-run` with an existing conda environment") - confirm_yn("WARNING: A conda environment already exists at '%s'\n" - "Remove existing environment" % context.target_prefix, - default='no', - dry_run=False) + raise CondaValueError( + "Cannot `create --dry-run` with an existing conda environment" + ) + confirm_yn( + "WARNING: A conda environment already exists at '%s'\n" + "Remove existing environment" % context.target_prefix, + default="no", + dry_run=False, + ) log.info("Removing existing environment %s", context.target_prefix) rm_rf(context.target_prefix) elif isdir(context.target_prefix): - confirm_yn("WARNING: A directory already exists at the target location '%s'\n" - "but it is not a conda environment.\n" - "Continue creating environment" % context.target_prefix, - default='no', - dry_run=False) + confirm_yn( + "WARNING: A directory already exists at the target location '%s'\n" + "but it is not a conda environment.\n" + "Continue creating environment" % context.target_prefix, + default="no", + dry_run=False, + ) - install(args, parser, 'create') + install(args, parser, "create") diff --git a/conda/cli/main_info.py b/conda/cli/main_info.py index 49be7f1ac04..0afa3fe1b1d 100644 --- a/conda/cli/main_info.py +++ b/conda/cli/main_info.py @@ -1,14 +1,14 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import json -from logging import getLogger import os -from os.path import exists, expanduser, isfile, join import re import sys +from logging import getLogger +from os.path import exists, expanduser, isfile, join -from .common import print_envs_list, stdout_json -from .. import CONDA_PACKAGE_ROOT, __version__ as conda_version +from .. import CONDA_PACKAGE_ROOT +from .. import __version__ as conda_version from ..base.context import context, env_name, sys_rc_path, user_rc_path from ..common.compat import on_win from ..common.url import mask_anaconda_token @@ -17,6 +17,7 @@ from ..models.channel import all_channel_urls, offline_keep from ..models.match_spec import MatchSpec from ..utils import human_bytes +from .common import print_envs_list, stdout_json log = getLogger(__name__) @@ -25,28 +26,38 @@ def get_user_site(): # pragma: no cover site_dirs = [] try: if not on_win: - if exists(expanduser('~/.local/lib')): - python_re = re.compile(r'python\d\.\d') - for path in os.listdir(expanduser('~/.local/lib/')): + if exists(expanduser("~/.local/lib")): + python_re = re.compile(r"python\d\.\d") + for path in os.listdir(expanduser("~/.local/lib/")): if python_re.match(path): site_dirs.append("~/.local/lib/%s" % path) else: - if 'APPDATA' not in os.environ: + if "APPDATA" not in os.environ: return site_dirs APPDATA = os.environ["APPDATA"] if exists(join(APPDATA, "Python")): site_dirs = [ - join(APPDATA, "Python", i) for i in os.listdir(join(APPDATA, "PYTHON")) + join(APPDATA, "Python", i) + for i in os.listdir(join(APPDATA, "PYTHON")) ] except OSError as e: - log.debug('Error accessing user site directory.\n%r', e) + log.debug("Error accessing user site directory.\n%r", e) return site_dirs -IGNORE_FIELDS = {'files', 'auth', 'preferred_env', 'priority'} +IGNORE_FIELDS = {"files", "auth", "preferred_env", "priority"} -SKIP_FIELDS = IGNORE_FIELDS | {'name', 'version', 'build', 'build_number', - 'channel', 'schannel', 'size', 'fn', 'depends'} +SKIP_FIELDS = IGNORE_FIELDS | { + "name", + "version", + "build", + "build_number", + "channel", + "schannel", + "size", + "fn", + "depends", +} def dump_record(pkg): @@ -54,7 +65,6 @@ def 
dump_record(pkg): def pretty_package(prec): - pkg = dump_record(prec) d = { "file name": prec.fn, @@ -71,12 +81,12 @@ def pretty_package(prec): print() header = "{} {} {}".format(d["name"], d["version"], d["build string"]) print(header) - print('-'*len(header)) + print("-" * len(header)) for key in d: print("%-12s: %s" % (key, d[key])) - print('dependencies:') - for dep in pkg['depends']: - print(' %s' % dep) + print("dependencies:") + for dep in pkg["depends"]: + print(" %s" % dep) def print_package_info(packages): @@ -105,6 +115,7 @@ def print_package_info(packages): def get_info_dict(system=False): try: from requests import __version__ as requests_version + # These environment variables can influence requests' behavior, along with configuration # in a .netrc file # CURL_CA_BUNDLE @@ -139,11 +150,10 @@ def get_info_dict(system=False): channels = list(all_channel_urls(context.channels)) if not context.json: - channels = [c + ('' if offline_keep(c) else ' (offline)') - for c in channels] + channels = [c + ("" if offline_keep(c) else " (offline)") for c in channels] channels = [mask_anaconda_token(c) for c in channels] - netrc_file = os.environ.get('NETRC') + netrc_file = os.environ.get("NETRC") if not netrc_file: user_netrc = expanduser("~/.netrc") if isfile(user_netrc): @@ -174,7 +184,7 @@ def get_info_dict(system=False): # is_foreign=bool(foreign), offline=context.offline, envs=[], - python_version='.'.join(map(str, sys.version_info)), + python_version=".".join(map(str, sys.version_info)), requests_version=requests_version, user_agent=context.user_agent, conda_location=CONDA_PACKAGE_ROOT, @@ -184,10 +194,11 @@ def get_info_dict(system=False): ) if on_win: from ..common._os.windows import is_admin_on_windows - info_dict['is_windows_admin'] = is_admin_on_windows() + + info_dict["is_windows_admin"] = is_admin_on_windows() else: - info_dict['UID'] = os.geteuid() - info_dict['GID'] = os.getegid() + info_dict["UID"] = os.geteuid() + info_dict["GID"] = os.getegid() env_var_keys = { "CIO_TEST", @@ -198,62 +209,70 @@ def get_info_dict(system=False): } # add all relevant env vars, e.g. 
startswith('CONDA') or endswith('PATH') - env_var_keys.update(v for v in os.environ if v.upper().startswith('CONDA')) - env_var_keys.update(v for v in os.environ if v.upper().startswith('PYTHON')) - env_var_keys.update(v for v in os.environ if v.upper().endswith('PATH')) - env_var_keys.update(v for v in os.environ if v.upper().startswith('SUDO')) + env_var_keys.update(v for v in os.environ if v.upper().startswith("CONDA")) + env_var_keys.update(v for v in os.environ if v.upper().startswith("PYTHON")) + env_var_keys.update(v for v in os.environ if v.upper().endswith("PATH")) + env_var_keys.update(v for v in os.environ if v.upper().startswith("SUDO")) - env_vars = {ev: os.getenv(ev, os.getenv(ev.lower(), '')) for ev in env_var_keys} - - proxy_keys = (v for v in os.environ if v.upper().endswith('PROXY')) - env_vars.update({ev: '' for ev in proxy_keys}) + env_vars = { + ev: os.getenv(ev, os.getenv(ev.lower(), "")) for ev in env_var_keys + } - info_dict.update({ - 'sys.version': sys.version, - 'sys.prefix': sys.prefix, - 'sys.executable': sys.executable, - 'site_dirs': get_user_site(), - 'env_vars': env_vars, - }) + proxy_keys = (v for v in os.environ if v.upper().endswith("PROXY")) + env_vars.update({ev: "" for ev in proxy_keys}) + + info_dict.update( + { + "sys.version": sys.version, + "sys.prefix": sys.prefix, + "sys.executable": sys.executable, + "site_dirs": get_user_site(), + "env_vars": env_vars, + } + ) return info_dict def get_env_vars_str(info_dict): from textwrap import wrap + builder = [] builder.append("%23s:" % "environment variables") - env_vars = info_dict.get('env_vars', {}) + env_vars = info_dict.get("env_vars", {}) for key in sorted(env_vars): value = wrap(env_vars[key]) first_line = value[0] if len(value) else "" other_lines = value[1:] if len(value) > 1 else () builder.append("%25s=%s" % (key, first_line)) for val in other_lines: - builder.append(' ' * 26 + val) - return '\n'.join(builder) + builder.append(" " * 26 + val) + return "\n".join(builder) def get_main_info_str(info_dict): - for key in 'pkgs_dirs', 'envs_dirs', 'channels', 'config_files': - info_dict['_' + key] = ('\n' + 26 * ' ').join(info_dict[key]) + for key in "pkgs_dirs", "envs_dirs", "channels", "config_files": + info_dict["_" + key] = ("\n" + 26 * " ").join(info_dict[key]) - info_dict['_virtual_pkgs'] = ('\n' + 26 * ' ').join([ - '%s=%s=%s' % tuple(x) for x in info_dict['virtual_pkgs']]) - info_dict['_rtwro'] = ('writable' if info_dict['root_writable'] else 'read only') + info_dict["_virtual_pkgs"] = ("\n" + 26 * " ").join( + ["%s=%s=%s" % tuple(x) for x in info_dict["virtual_pkgs"]] + ) + info_dict["_rtwro"] = "writable" if info_dict["root_writable"] else "read only" format_param = lambda nm, val: "%23s : %s" % (nm, val) - builder = [''] + builder = [""] - if info_dict['active_prefix_name']: - builder.append(format_param('active environment', info_dict['active_prefix_name'])) - builder.append(format_param('active env location', info_dict['active_prefix'])) + if info_dict["active_prefix_name"]: + builder.append( + format_param("active environment", info_dict["active_prefix_name"]) + ) + builder.append(format_param("active env location", info_dict["active_prefix"])) else: - builder.append(format_param('active environment', info_dict['active_prefix'])) + builder.append(format_param("active environment", info_dict["active_prefix"])) - if info_dict['conda_shlvl'] >= 0: - builder.append(format_param('shell level', info_dict['conda_shlvl'])) + if info_dict["conda_shlvl"] >= 0: + builder.append(format_param("shell 
level", info_dict["conda_shlvl"])) builder.extend( ( @@ -278,34 +297,40 @@ def get_main_info_str(info_dict): ) if on_win: - builder.append(format_param("administrator", info_dict['is_windows_admin'])) + builder.append(format_param("administrator", info_dict["is_windows_admin"])) else: - builder.append(format_param("UID:GID", "{}:{}".format(info_dict["UID"], info_dict["GID"]))) + builder.append( + format_param("UID:GID", "{}:{}".format(info_dict["UID"], info_dict["GID"])) + ) - builder.extend(( - format_param('netrc file', info_dict['netrc_file']), - format_param('offline mode', info_dict['offline']), - )) + builder.extend( + ( + format_param("netrc file", info_dict["netrc_file"]), + format_param("offline mode", info_dict["offline"]), + ) + ) - builder.append('') - return '\n'.join(builder) + builder.append("") + return "\n".join(builder) def execute(args, parser): if args.base: if context.json: - stdout_json({'root_prefix': context.root_prefix}) + stdout_json({"root_prefix": context.root_prefix}) else: print(f"{context.root_prefix}") return if args.packages: from ..resolve import ResolvePackageNotFound + try: print_package_info(args.packages) return except ResolvePackageNotFound as e: # pragma: no cover from ..exceptions import PackagesNotFoundError + raise PackagesNotFoundError(e.bad_deps) if args.unsafe_channels: @@ -315,26 +340,30 @@ def execute(args, parser): print(json.dumps({"channels": context.channels})) return 0 - options = 'envs', 'system' + options = "envs", "system" if args.all or context.json: for option in options: setattr(args, option, True) info_dict = get_info_dict(args.system) - if (args.all or all(not getattr(args, opt) for opt in options)) and not context.json: + if ( + args.all or all(not getattr(args, opt) for opt in options) + ) and not context.json: stdout_logger = getLogger("conda.stdoutlog") stdout_logger.info(get_main_info_str(info_dict)) stdout_logger.info("\n") if args.envs: from ..core.envs_manager import list_all_known_prefixes - info_dict['envs'] = list_all_known_prefixes() - print_envs_list(info_dict['envs'], not context.json) + + info_dict["envs"] = list_all_known_prefixes() + print_envs_list(info_dict["envs"], not context.json) if args.system: if not context.json: from .find_commands import find_commands, find_executable + print("sys.version: %s..." 
% (sys.version[:40])) print("sys.prefix: %s" % sys.prefix) print("sys.executable: %s" % sys.executable) @@ -348,10 +377,10 @@ def execute(args, parser): else: print() for site_dir in site_dirs[1:]: - print(' %s' % site_dir) + print(" %s" % site_dir) print() - for name, value in sorted(info_dict['env_vars'].items()): + for name, value in sorted(info_dict["env_vars"].items()): print(f"{name}: {value}") print() diff --git a/conda/cli/main_init.py b/conda/cli/main_init.py index 4e0d9346fa7..bc122849446 100644 --- a/conda/cli/main_init.py +++ b/conda/cli/main_init.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger from ..base.context import context @@ -30,5 +29,11 @@ def execute(args, parser): else: for_user = args.user and not args.system anaconda_prompt = on_win and args.anaconda_prompt - return initialize(context.conda_prefix, selected_shells, for_user, args.system, - anaconda_prompt, args.reverse) + return initialize( + context.conda_prefix, + selected_shells, + for_user, + args.system, + anaconda_prompt, + args.reverse, + ) diff --git a/conda/cli/main_install.py b/conda/cli/main_install.py index d712a2f6b25..79a423aaaf1 100644 --- a/conda/cli/main_install.py +++ b/conda/cli/main_install.py @@ -1,20 +1,22 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import sys -from .install import install from ..base.context import context from ..notices import notices +from .install import install @notices def execute(args, parser): if context.force: - print("\n\n" - "WARNING: The --force flag will be removed in a future conda release.\n" - " See 'conda install --help' for details about the --force-reinstall\n" - " and --clobber flags.\n" - "\n", file=sys.stderr) + print( + "\n\n" + "WARNING: The --force flag will be removed in a future conda release.\n" + " See 'conda install --help' for details about the --force-reinstall\n" + " and --clobber flags.\n" + "\n", + file=sys.stderr, + ) - install(args, parser, 'install') + install(args, parser, "install") diff --git a/conda/cli/main_list.py b/conda/cli/main_list.py index e599d33213c..a09a98a1344 100644 --- a/conda/cli/main_list.py +++ b/conda/cli/main_list.py @@ -1,24 +1,23 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import logging -from os.path import isdir, isfile import re +from os.path import isdir, isfile -from .common import disp_features, stdout_json from ..base.constants import DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL from ..base.context import context from ..core.prefix_data import PrefixData from ..gateways.disk.test import is_conda_environment from ..history import History +from .common import disp_features, stdout_json log = logging.getLogger(__name__) def print_export_header(subdir): - print('# This file may be used to create an environment using:') - print('# $ conda create --name <env> --file <this file>') - print('# platform: %s' % subdir) + print("# This file may be used to create an environment using:") + print("# $ conda create --name <env> --file <this file>") + print("# platform: %s" % subdir) def get_packages(installed, regex): @@ -29,25 +28,26 @@ def get_packages(installed, regex): yield prefix_rec -def list_packages(prefix, regex=None, format='human', - show_channel_urls=None): +def list_packages(prefix, regex=None, format="human", show_channel_urls=None): res = 0 result = [] - if format == 'human': - result.append('# packages in environment at %s:' % prefix) - result.append('#') - result.append('# %-23s %-15s %15s Channel' %
("Name", "Version", "Build")) + if format == "human": + result.append("# packages in environment at %s:" % prefix) + result.append("#") + result.append("# %-23s %-15s %15s Channel" % ("Name", "Version", "Build")) - installed = sorted(PrefixData(prefix, pip_interop_enabled=True).iter_records(), - key=lambda x: x.name) + installed = sorted( + PrefixData(prefix, pip_interop_enabled=True).iter_records(), + key=lambda x: x.name, + ) for prec in get_packages(installed, regex) if regex else installed: - if format == 'canonical': + if format == "canonical": result.append(prec.dist_fields_dump() if context.json else prec.dist_str()) continue - if format == 'export': - result.append('='.join((prec.name, prec.version, prec.build))) + if format == "export": + result.append("=".join((prec.name, prec.version, prec.build))) continue features = set(prec.get("features") or ()) @@ -55,31 +55,42 @@ def list_packages(prefix, regex=None, format='human', disp += " %s" % disp_features(features) schannel = prec.get("schannel") show_channel_urls = show_channel_urls or context.show_channel_urls - if (show_channel_urls or show_channel_urls is None - and schannel != DEFAULTS_CHANNEL_NAME): - disp += ' %s' % schannel + if ( + show_channel_urls + or show_channel_urls is None + and schannel != DEFAULTS_CHANNEL_NAME + ): + disp += " %s" % schannel result.append(disp) return res, result -def print_packages(prefix, regex=None, format='human', piplist=False, - json=False, show_channel_urls=None): +def print_packages( + prefix, + regex=None, + format="human", + piplist=False, + json=False, + show_channel_urls=None, +): if not isdir(prefix): from ..exceptions import EnvironmentLocationNotFound + raise EnvironmentLocationNotFound(prefix) if not json: - if format == 'export': + if format == "export": print_export_header(context.subdir) - exitcode, output = list_packages(prefix, regex, format=format, - show_channel_urls=show_channel_urls) + exitcode, output = list_packages( + prefix, regex, format=format, show_channel_urls=show_channel_urls + ) if context.json: stdout_json(output) else: - print('\n'.join(map(str, output))) + print("\n".join(map(str, output))) return exitcode @@ -87,27 +98,29 @@ def print_packages(prefix, regex=None, format='human', piplist=False, def print_explicit(prefix, add_md5=False): if not isdir(prefix): from ..exceptions import EnvironmentLocationNotFound + raise EnvironmentLocationNotFound(prefix) print_export_header(context.subdir) print("@EXPLICIT") for prefix_record in PrefixData(prefix).iter_records_sorted(): - url = prefix_record.get('url') + url = prefix_record.get("url") if not url or url.startswith(UNKNOWN_CHANNEL): - print('# no URL for: %s' % prefix_record['fn']) + print("# no URL for: %s" % prefix_record["fn"]) continue - md5 = prefix_record.get('md5') - print(url + ('#%s' % md5 if add_md5 and md5 else '')) + md5 = prefix_record.get("md5") + print(url + ("#%s" % md5 if add_md5 and md5 else "")) def execute(args, parser): prefix = context.target_prefix if not is_conda_environment(prefix): from ..exceptions import EnvironmentLocationNotFound + raise EnvironmentLocationNotFound(prefix) regex = args.regex if args.full_name: - regex = r'^%s$' % regex + regex = r"^%s$" % regex if args.revisions: h = History(prefix) @@ -118,6 +131,7 @@ def execute(args, parser): stdout_json(h.object_log()) else: from ..exceptions import PathNotFoundError + raise PathNotFoundError(h.path) return @@ -126,15 +140,20 @@ def execute(args, parser): return if args.canonical: - format = 'canonical' + format = "canonical" 
elif args.export: - format = 'export' + format = "export" else: - format = 'human' + format = "human" if context.json: - format = 'canonical' - - exitcode = print_packages(prefix, regex, format, piplist=args.pip, - json=context.json, - show_channel_urls=context.show_channel_urls) + format = "canonical" + + exitcode = print_packages( + prefix, + regex, + format, + piplist=args.pip, + json=context.json, + show_channel_urls=context.show_channel_urls, + ) return exitcode diff --git a/conda/cli/main_notices.py b/conda/cli/main_notices.py index 36f8824c3cf..e67a704081d 100644 --- a/conda/cli/main_notices.py +++ b/conda/cli/main_notices.py @@ -1,7 +1,6 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from argparse import Namespace, ArgumentParser +from argparse import ArgumentParser, Namespace from ..exceptions import CondaError from ..notices import core as notices diff --git a/conda/cli/main_package.py b/conda/cli/main_package.py index 3dc3f23ed64..bd894bbaa29 100644 --- a/conda/cli/main_package.py +++ b/conda/cli/main_package.py @@ -1,13 +1,12 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import hashlib import json import os -from os.path import abspath, basename, dirname, isdir, isfile, islink, join import re import tarfile import tempfile +from os.path import abspath, basename, dirname, isdir, isfile, islink, join from ..auxlib.entity import EntityEncoder from ..base.constants import CONDA_PACKAGE_EXTENSION_V1, PREFIX_PLACEHOLDER @@ -36,16 +35,15 @@ def remove(prefix, files): def execute(args, parser): - prefix = context.target_prefix if args.which: for path in args.which: for prec in which_package(path): - print('%-50s %s' % (path, prec.dist_str())) + print("%-50s %s" % (path, prec.dist_str())) return - print('# prefix:', prefix) + print("# prefix:", prefix) if args.reset: remove(prefix, untracked(prefix)) @@ -53,21 +51,23 @@ def execute(args, parser): if args.untracked: files = sorted(untracked(prefix)) - print('# untracked files: %d' % len(files)) + print("# untracked files: %d" % len(files)) for fn in files: print(fn) return - make_tarbz2(prefix, - name=args.pkg_name.lower(), - version=args.pkg_version, - build_number=int(args.pkg_build)) + make_tarbz2( + prefix, + name=args.pkg_name.lower(), + version=args.pkg_version, + build_number=int(args.pkg_build), + ) def get_installed_version(prefix, name): for info in PrefixData(prefix).iter_records(): - if info['name'] == name: - return str(info['version']) + if info["name"] == name: + return str(info["version"]) return None @@ -82,48 +82,49 @@ def create_info(name, version, build_number, requires_py): depends=[], ) if requires_py: - d['build'] = ('py%d%d_' % requires_py) + d['build'] - d['depends'].append('python %d.%d*' % requires_py) + d["build"] = ("py%d%d_" % requires_py) + d["build"] + d["depends"].append("python %d.%d*" % requires_py) return d -shebang_pat = re.compile(r'^#!.+$', re.M) +shebang_pat = re.compile(r"^#!.+$", re.M) + + def fix_shebang(tmp_dir, path): - if open(path, 'rb').read(2) != '#!': + if open(path, "rb").read(2) != "#!": return False with open(path) as fi: data = fi.read() m = shebang_pat.match(data) - if not (m and 'python' in m.group()): + if not (m and "python" in m.group()): return False - data = shebang_pat.sub('#!%s/bin/python' % PREFIX_PLACEHOLDER, - data, count=1) + data = shebang_pat.sub("#!%s/bin/python" % PREFIX_PLACEHOLDER, data, count=1) tmp_path = join(tmp_dir, basename(path)) - with open(tmp_path, 'w') as fo: + with open(tmp_path, "w") as 
fo: fo.write(data) - os.chmod(tmp_path, int('755', 8)) + os.chmod(tmp_path, int("755", 8)) return True def _add_info_dir(t, tmp_dir, files, has_prefix, info): - info_dir = join(tmp_dir, 'info') + info_dir = join(tmp_dir, "info") os.mkdir(info_dir) - with open(join(info_dir, 'files'), 'w') as fo: + with open(join(info_dir, "files"), "w") as fo: for f in files: - fo.write(f + '\n') + fo.write(f + "\n") - with open(join(info_dir, 'index.json'), 'w') as fo: + with open(join(info_dir, "index.json"), "w") as fo: json.dump(info, fo, indent=2, sort_keys=True, cls=EntityEncoder) if has_prefix: - with open(join(info_dir, 'has_prefix'), 'w') as fo: + with open(join(info_dir, "has_prefix"), "w") as fo: for f in has_prefix: - fo.write(f + '\n') + fo.write(f + "\n") for fn in os.listdir(info_dir): - t.add(join(info_dir, fn), 'info/' + fn) + t.add(join(info_dir, fn), "info/" + fn) def create_conda_pkg(prefix, files, info, tar_path, update_info=None): @@ -135,32 +136,31 @@ def create_conda_pkg(prefix, files, info, tar_path, update_info=None): warnings = [] has_prefix = [] tmp_dir = tempfile.mkdtemp() - t = tarfile.open(tar_path, 'w:bz2') - h = hashlib.new('sha1') + t = tarfile.open(tar_path, "w:bz2") + h = hashlib.new("sha1") for f in files: - assert not (f.startswith('/') or f.endswith('/') or '\\' in f or f == ''), f + assert not (f.startswith("/") or f.endswith("/") or "\\" in f or f == ""), f path = join(prefix, f) - if f.startswith('bin/') and fix_shebang(tmp_dir, path): + if f.startswith("bin/") and fix_shebang(tmp_dir, path): path = join(tmp_dir, basename(path)) has_prefix.append(f) t.add(path, f) - h.update(f.encode('utf-8')) - h.update(b'\x00') + h.update(f.encode("utf-8")) + h.update(b"\x00") if islink(path): link = os.readlink(path) if isinstance(link, str): - h.update(bytes(link, 'utf-8')) + h.update(bytes(link, "utf-8")) else: h.update(link) - if link.startswith('/'): - warnings.append('found symlink to absolute path: %s -> %s' % - (f, link)) + if link.startswith("/"): + warnings.append(f"found symlink to absolute path: {f} -> {link}") elif isfile(path): - h.update(open(path, 'rb').read()) - if path.endswith('.egg-link'): - warnings.append('found egg link: %s' % f) + h.update(open(path, "rb").read()) + if path.endswith(".egg-link"): + warnings.append("found egg link: %s" % f) - info['file_hash'] = h.hexdigest() + info["file_hash"] = h.hexdigest() if update_info: update_info(info) _add_info_dir(t, tmp_dir, files, has_prefix, info) @@ -169,8 +169,7 @@ def create_conda_pkg(prefix, files, info, tar_path, update_info=None): return warnings -def make_tarbz2(prefix, name='unknown', version='0.0', build_number=0, - files=None): +def make_tarbz2(prefix, name="unknown", version="0.0", build_number=0, files=None): if files is None: files = untracked(prefix) print("# files: %d" % len(files)) @@ -178,17 +177,17 @@ def make_tarbz2(prefix, name='unknown', version='0.0', build_number=0, print("# failed: nothing to do") return None - if any('/site-packages/' in f for f in files): - python_version = get_installed_version(prefix, 'python') + if any("/site-packages/" in f for f in files): + python_version = get_installed_version(prefix, "python") assert python_version is not None - requires_py = tuple(int(x) for x in python_version[:3].split('.')) + requires_py = tuple(int(x) for x in python_version[:3].split(".")) else: requires_py = False info = create_info(name, version, build_number, requires_py) - tarbz2_fn = ('%(name)s-%(version)s-%(build)s' % info) + CONDA_PACKAGE_EXTENSION_V1 + tarbz2_fn = 
("%(name)s-%(version)s-%(build)s" % info) + CONDA_PACKAGE_EXTENSION_V1 create_conda_pkg(prefix, files, info, tarbz2_fn) - print('# success') + print("# success") print(tarbz2_fn) return tarbz2_fn @@ -203,10 +202,11 @@ def which_package(path): prefix = which_prefix(path) if prefix is None: from ..exceptions import CondaVerificationError + raise CondaVerificationError("could not determine conda prefix from: %s" % path) for prec in PrefixData(prefix).iter_records(): - if any(paths_equal(join(prefix, f), path) for f in prec['files'] or ()): + if any(paths_equal(join(prefix, f), path) for f in prec["files"] or ()): yield prec @@ -217,7 +217,7 @@ def which_prefix(path): """ prefix = abspath(path) while True: - if isdir(join(prefix, 'conda-meta')): + if isdir(join(prefix, "conda-meta")): # we found the it, so let's return it return prefix if prefix == dirname(prefix): diff --git a/conda/cli/main_pip.py b/conda/cli/main_pip.py index db33d389610..38add3390f6 100644 --- a/conda/cli/main_pip.py +++ b/conda/cli/main_pip.py @@ -1,26 +1,29 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from logging import getLogger import os import sys +from logging import getLogger -from .main import main as main_main from .. import CondaError from ..auxlib.ish import dals +from .main import main as main_main log = getLogger(__name__) def pip_installed_post_parse_hook(args, p): - if args.cmd not in ('init', 'info'): - raise CondaError(dals(""" + if args.cmd not in ("init", "info"): + raise CondaError( + dals( + """ Conda has not been initialized. To enable full conda functionality, please run 'conda init'. For additional information, see 'conda init --help'. - """)) + """ + ) + ) def main(*args, **kwargs): @@ -29,5 +32,5 @@ def main(*args, **kwargs): return main_main(*args, **kwargs) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/conda/cli/main_remove.py b/conda/cli/main_remove.py index 3d8c8077f61..9e5beedd264 100644 --- a/conda/cli/main_remove.py +++ b/conda/cli/main_remove.py @@ -1,29 +1,33 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import logging -from os.path import isfile, join import sys +from os.path import isfile, join -from .common import check_non_admin, specs_from_args -from .install import handle_txn from ..base.context import context from ..core.envs_manager import unregister_env from ..core.link import PrefixSetup, UnlinkLinkTransaction from ..core.prefix_data import PrefixData -from ..exceptions import CondaEnvironmentError, CondaValueError, DirectoryNotACondaEnvironmentError -from ..gateways.disk.delete import rm_rf, path_is_clean +from ..exceptions import ( + CondaEnvironmentError, + CondaValueError, + DirectoryNotACondaEnvironmentError, + PackagesNotFoundError, +) +from ..gateways.disk.delete import path_is_clean, rm_rf from ..models.match_spec import MatchSpec -from ..exceptions import PackagesNotFoundError +from .common import check_non_admin, specs_from_args +from .install import handle_txn log = logging.getLogger(__name__) def execute(args, parser): - if not (args.all or args.package_names): - raise CondaValueError('no package names supplied,\n' - ' try "conda remove -h" for more details') + raise CondaValueError( + "no package names supplied,\n" + ' try "conda remove -h" for more details' + ) prefix = context.target_prefix check_non_admin() @@ -50,13 +54,15 @@ def execute(args, parser): if args.all: if prefix == context.root_prefix: - raise CondaEnvironmentError('cannot remove root 
environment,\n' - ' add -n NAME or -p PREFIX option') - if not isfile(join(prefix, 'conda-meta', 'history')): + raise CondaEnvironmentError( + "cannot remove root environment,\n" + " add -n NAME or -p PREFIX option" + ) + if not isfile(join(prefix, "conda-meta", "history")): raise DirectoryNotACondaEnvironmentError(prefix) print("\nRemove all packages in environment %s:\n" % prefix, file=sys.stderr) - if 'package_names' in args: + if "package_names" in args: stp = PrefixSetup( target_prefix=prefix, unlink_precs=tuple(PrefixData(prefix).iter_records()), @@ -69,7 +75,9 @@ def execute(args, parser): try: handle_txn(txn, prefix, args, False, True) except PackagesNotFoundError: - print("No packages found in %s. Continuing environment removal" % prefix) + print( + "No packages found in %s. Continuing environment removal" % prefix + ) if not context.dry_run: rm_rf(prefix, clean_empty_parents=True) unregister_env(prefix) diff --git a/conda/cli/main_rename.py b/conda/cli/main_rename.py index b80b7ad16c8..ab88234f5f4 100644 --- a/conda/cli/main_rename.py +++ b/conda/cli/main_rename.py @@ -2,11 +2,11 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from functools import partial import os +from functools import partial -from ..base.context import context, locate_prefix_by_name, validate_prefix_name from ..base.constants import DRY_RUN_PREFIX +from ..base.context import context, locate_prefix_by_name, validate_prefix_name from ..cli import common, install from ..common.path import expand, paths_equal from ..exceptions import CondaEnvException @@ -54,7 +54,13 @@ def execute(args, _): def clone_and_remove(): actions: tuple[partial, ...] = ( - partial(install.clone, source, destination, quiet=context.quiet, json=context.json), + partial( + install.clone, + source, + destination, + quiet=context.quiet, + json=context.json, + ), partial(rm_rf, source), ) diff --git a/conda/cli/main_run.py b/conda/cli/main_run.py index f664f3b0bef..3efb1cad66a 100644 --- a/conda/cli/main_run.py +++ b/conda/cli/main_run.py @@ -1,16 +1,14 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - -from logging import getLogger import os import sys +from logging import getLogger from ..base.context import context -from ..utils import wrap_subprocess_call -from ..gateways.disk.delete import rm_rf from ..common.compat import encode_environment +from ..gateways.disk.delete import rm_rf from ..gateways.subprocess import subprocess_call +from ..utils import wrap_subprocess_call from .common import validate_prefix @@ -18,7 +16,9 @@ def execute(args, parser): # create run script script, command = wrap_subprocess_call( context.root_prefix, - validate_prefix(context.target_prefix or os.getenv("CONDA_PREFIX") or context.root_prefix), + validate_prefix( + context.target_prefix or os.getenv("CONDA_PREFIX") or context.root_prefix + ), args.dev, args.debug_wrapper_scripts, args.executable_call, @@ -44,7 +44,9 @@ def execute(args, parser): # log error if response.rc != 0: log = getLogger(__name__) - log.error(f"`conda run {' '.join(args.executable_call)}` failed. (See above for error)") + log.error( + f"`conda run {' '.join(args.executable_call)}` failed. 
(See above for error)" + ) # remove script if "CONDA_TEST_SAVE_TEMPS" not in os.environ: diff --git a/conda/cli/main_search.py b/conda/cli/main_search.py index c8c363cfdee..f6730bd584d 100644 --- a/conda/cli/main_search.py +++ b/conda/cli/main_search.py @@ -1,88 +1,111 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from collections import defaultdict from datetime import datetime, timezone from ..base.context import context from ..cli.common import stdout_json -from ..common.io import Spinner +from ..common.io import Spinner, dashlist from ..core.envs_manager import query_all_prefixes from ..core.index import calculate_channel_urls from ..core.subdir_data import SubdirData from ..models.match_spec import MatchSpec from ..models.records import PackageRecord from ..models.version import VersionOrder -from ..common.io import dashlist from ..utils import human_bytes def execute(args, parser): spec = MatchSpec(args.match_spec) - if spec.get_exact_value('subdir'): - subdirs = spec.get_exact_value('subdir'), + if spec.get_exact_value("subdir"): + subdirs = (spec.get_exact_value("subdir"),) else: subdirs = context.subdirs if args.envs: - with Spinner("Searching environments for %s" % spec, - not context.verbosity and not context.quiet, - context.json): + with Spinner( + "Searching environments for %s" % spec, + not context.verbosity and not context.quiet, + context.json, + ): prefix_matches = query_all_prefixes(spec) - ordered_result = tuple({ - 'location': prefix, - 'package_records': tuple(sorted( - (PackageRecord.from_objects(prefix_rec) for prefix_rec in prefix_recs), - key=lambda prec: prec._pkey - )), - } for prefix, prefix_recs in prefix_matches) + ordered_result = tuple( + { + "location": prefix, + "package_records": tuple( + sorted( + ( + PackageRecord.from_objects(prefix_rec) + for prefix_rec in prefix_recs + ), + key=lambda prec: prec._pkey, + ) + ), + } + for prefix, prefix_recs in prefix_matches + ) if context.json: stdout_json(ordered_result) elif args.info: for pkg_group in ordered_result: - for prec in pkg_group['package_records']: + for prec in pkg_group["package_records"]: pretty_record(prec) else: - builder = ['# %-13s %15s %15s %-20s %-20s' % ( - "Name", - "Version", - "Build", - "Channel", - "Location", - )] + builder = [ + "# %-13s %15s %15s %-20s %-20s" + % ( + "Name", + "Version", + "Build", + "Channel", + "Location", + ) + ] for pkg_group in ordered_result: - for prec in pkg_group['package_records']: - builder.append('%-15s %15s %15s %-20s %-20s' % ( - prec.name, - prec.version, - prec.build, - prec.channel.name, - pkg_group['location'], - )) - print('\n'.join(builder)) + for prec in pkg_group["package_records"]: + builder.append( + "%-15s %15s %15s %-20s %-20s" + % ( + prec.name, + prec.version, + prec.build, + prec.channel.name, + pkg_group["location"], + ) + ) + print("\n".join(builder)) return 0 - with Spinner("Loading channels", not context.verbosity and not context.quiet, context.json): - spec_channel = spec.get_exact_value('channel') + with Spinner( + "Loading channels", not context.verbosity and not context.quiet, context.json + ): + spec_channel = spec.get_exact_value("channel") channel_urls = (spec_channel,) if spec_channel else context.channels - matches = sorted(SubdirData.query_all(spec, channel_urls, subdirs), - key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build)) + matches = sorted( + SubdirData.query_all(spec, channel_urls, subdirs), + key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build), + ) if not 
matches and spec.get_exact_value("name"): flex_spec = MatchSpec(spec, name="*%s*" % spec.name) if not context.json: print(f"No match found for: {spec}. Search: {flex_spec}") - matches = sorted(SubdirData.query_all(flex_spec, channel_urls, subdirs), - key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build)) + matches = sorted( + SubdirData.query_all(flex_spec, channel_urls, subdirs), + key=lambda rec: (rec.name, VersionOrder(rec.version), rec.build), + ) if not matches: - channels_urls = tuple(calculate_channel_urls( - channel_urls=context.channels, - prepend=not args.override_channels, - platform=subdirs[0], - use_local=args.use_local, - )) + channels_urls = tuple( + calculate_channel_urls( + channel_urls=context.channels, + prepend=not args.override_channels, + platform=subdirs[0], + use_local=args.use_local, + ) + ) from ..exceptions import PackagesNotFoundError + raise PackagesNotFoundError((str(spec),), channels_urls) if context.json: @@ -96,20 +119,26 @@ def execute(args, parser): pretty_record(record) else: - builder = ['# %-18s %15s %15s %-20s' % ( - "Name", - "Version", - "Build", - "Channel", - )] + builder = [ + "# %-18s %15s %15s %-20s" + % ( + "Name", + "Version", + "Build", + "Channel", + ) + ] for record in matches: - builder.append('%-20s %15s %15s %-20s' % ( - record.name, - record.version, - record.build, - record.channel.name, - )) - print('\n'.join(builder)) + builder.append( + "%-20s %15s %15s %-20s" + % ( + record.name, + record.version, + record.build, + record.channel.name, + ) + ) + print("\n".join(builder)) def pretty_record(record): @@ -120,7 +149,7 @@ def push_line(display_name, attr_name): builder = [] builder.append(record.name + " " + record.version + " " + record.build) - builder.append('-'*len(builder[0])) + builder.append("-" * len(builder[0])) push_line("file name", "fn") push_line("name", "name") @@ -138,11 +167,14 @@ def push_line(display_name, attr_name): ) builder.append("%-12s: %s" % ("timestamp", date_str)) if record.track_features: - builder.append("%-12s: %s" % ("track_features", dashlist(record.track_features))) + builder.append( + "%-12s: %s" % ("track_features", dashlist(record.track_features)) + ) if record.constrains: builder.append("%-12s: %s" % ("constraints", dashlist(record.constrains))) builder.append( - "%-12s: %s" % ("dependencies", dashlist(record.depends) if record.depends else "[]") + "%-12s: %s" + % ("dependencies", dashlist(record.depends) if record.depends else "[]") ) - builder.append('\n') - print('\n'.join(builder)) + builder.append("\n") + print("\n".join(builder)) diff --git a/conda/cli/main_update.py b/conda/cli/main_update.py index 03073a99bc1..799b118104d 100644 --- a/conda/cli/main_update.py +++ b/conda/cli/main_update.py @@ -1,20 +1,22 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import sys -from .install import install from ..base.context import context from ..notices import notices +from .install import install @notices def execute(args, parser): if context.force: - print("\n\n" - "WARNING: The --force flag will be removed in a future conda release.\n" - " See 'conda update --help' for details about the --force-reinstall\n" - " and --clobber flags.\n" - "\n", file=sys.stderr) + print( + "\n\n" + "WARNING: The --force flag will be removed in a future conda release.\n" + " See 'conda update --help' for details about the --force-reinstall\n" + " and --clobber flags.\n" + "\n", + file=sys.stderr, + ) - install(args, parser, 'update') + install(args, parser, "update") diff --git 
a/conda/cli/python_api.py b/conda/cli/python_api.py index 0e1271bd254..ab3c27d831e 100644 --- a/conda/cli/python_api.py +++ b/conda/cli/python_api.py @@ -1,16 +1,15 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger -from .conda_argparse import do_call -from ..common.compat import encode_arguments -from .main import generate_parser from ..base.constants import SEARCH_PATH from ..base.context import context +from ..common.compat import encode_arguments from ..common.io import CaptureTarget, argv, captured from ..exceptions import conda_exception_handler from ..gateways.logging import initialize_std_loggers +from .conda_argparse import do_call +from .main import generate_parser log = getLogger(__name__) @@ -32,6 +31,7 @@ class Commands: STRING = CaptureTarget.STRING STDOUT = CaptureTarget.STDOUT + # Note, a deviated copy of this code appears in tests/test_create.py def run_command(command, *arguments, **kwargs): """Runs a conda command in-process with a given set of command-line interface arguments. @@ -72,10 +72,10 @@ def run_command(command, *arguments, **kwargs): >>> run_command(Commands.CREATE, ["-n", "newenv", "python=3", "flask"], search_path=()) """ initialize_std_loggers() - use_exception_handler = kwargs.pop('use_exception_handler', False) - configuration_search_path = kwargs.pop('search_path', SEARCH_PATH) - stdout = kwargs.pop('stdout', STRING) - stderr = kwargs.pop('stderr', STRING) + use_exception_handler = kwargs.pop("use_exception_handler", False) + configuration_search_path = kwargs.pop("search_path", SEARCH_PATH) + stdout = kwargs.pop("stdout", STRING) + stderr = kwargs.pop("stderr", STRING) p = generate_parser() if arguments and isinstance(arguments[0], list): @@ -92,15 +92,18 @@ def run_command(command, *arguments, **kwargs): ) from subprocess import list2cmdline + log.debug("executing command >>> conda %s", list2cmdline(arguments)) - is_run = arguments[0] == 'run' + is_run = arguments[0] == "run" if is_run: cap_args = (None, None) else: cap_args = (stdout, stderr) try: - with argv(['python_api'] + encode_arguments(arguments)), captured(*cap_args) as c: + with argv(["python_api"] + encode_arguments(arguments)), captured( + *cap_args + ) as c: if use_exception_handler: result = conda_exception_handler(do_call, args, p) else: @@ -117,5 +120,7 @@ def run_command(command, *arguments, **kwargs): e.stdout, e.stderr = stdout, stderr raise e return_code = result or 0 - log.debug("\n stdout: %s\n stderr: %s\n return_code: %s", stdout, stderr, return_code) + log.debug( + "\n stdout: %s\n stderr: %s\n return_code: %s", stdout, stderr, return_code + ) return stdout, stderr, return_code diff --git a/conda/common/_logic.py b/conda/common/_logic.py index 4c79f32f167..5332ce4b717 100644 --- a/conda/common/_logic.py +++ b/conda/common/_logic.py @@ -1,10 +1,9 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import sys from array import array from itertools import combinations from logging import DEBUG, getLogger -import sys log = getLogger(__name__) @@ -15,6 +14,7 @@ class _ClauseList: """Storage for the CNF clauses, represented as a list of tuples of ints.""" + def __init__(self): self._clause_list = [] # Methods append and extend are directly bound for performance reasons, @@ -51,7 +51,7 @@ def as_array(self): """ Return clauses as a flat int array, each clause being terminated by 0. 
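For example, clauses [(1, -2), (3,)] are flattened to array("i", [1, -2, 0, 3, 0]).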
""" - clause_array = array('i') + clause_array = array("i") for c in self._clause_list: clause_array.extend(c) clause_array.append(0) @@ -63,8 +63,9 @@ class _ClauseArray: Storage for the CNF clauses, represented as a flat int array. Each clause is terminated by int(0). """ + def __init__(self): - self._clause_array = array('i') + self._clause_array = array("i") # Methods append and extend are directly bound for performance reasons, # to avoid call overhead and lookups. self._array_append = self._clause_array.append @@ -101,7 +102,7 @@ def restore_state(self, saved_state): Removes clauses that were added after the state has been saved. """ len_clause_array = saved_state - self._clause_array[len_clause_array:] = array('i') + self._clause_array[len_clause_array:] = array("i") def as_list(self): """Return clauses as a list of tuples of ints.""" @@ -342,7 +343,18 @@ def And(self, f, g, polarity, add_new_clauses=False): # expressions and tuple additions in self.assign. x = self.new_var() if polarity in (True, None): - self.add_clauses([(-x, f,), (-x, g,)]) + self.add_clauses( + [ + ( + -x, + f, + ), + ( + -x, + g, + ), + ] + ) if polarity in (False, None): self.add_clauses([(x, -f, -g)]) return x @@ -368,7 +380,18 @@ def Or(self, f, g, polarity, add_new_clauses=False): if polarity in (True, None): self.add_clauses([(-x, f, g)]) if polarity in (False, None): - self.add_clauses([(x, -f,), (x, -g,)]) + self.add_clauses( + [ + ( + x, + -f, + ), + ( + x, + -g, + ), + ] + ) return x pval = [(f, g)] if polarity in (True, None) else [] nval = [(-f,), (-g,)] if polarity in (False, None) else [] @@ -522,7 +545,7 @@ def BDD(self, lits, coeffs, nterms, lo, hi, polarity): # ELSE l <= S <= u # we use memoization to prune common subexpressions total = sum(c for c in coeffs[:nterms]) - target = (nterms-1, 0, total) + target = (nterms - 1, 0, total) call_stack = [target] ret = {} call_stack_append = call_stack.append @@ -560,7 +583,9 @@ def BDD(self, lits, coeffs, nterms, lo, hi, polarity): # avoid calling self.assign here via add_new_clauses=True. # If we want to translate parts of the code to a compiled language, # self.BDD (+ its downward call stack) is the prime candidate! - ret[call_stack_pop()] = ITE(abs(LA), thi, tlo, polarity, add_new_clauses=True) + ret[call_stack_pop()] = ITE( + abs(LA), thi, tlo, polarity, add_new_clauses=True + ) return ret[target] def LinearBound(self, lits, coeffs, lo, hi, preprocess, polarity): @@ -571,7 +596,7 @@ def LinearBound(self, lits, coeffs, lo, hi, preprocess, polarity): nterms = len(coeffs) if nterms and coeffs[-1] > hi: nprune = sum(c > hi for c in coeffs) - log.trace('Eliminating %d/%d terms for bound violation' % (nprune, nterms)) + log.trace("Eliminating %d/%d terms for bound violation" % (nprune, nterms)) nterms -= nprune else: nprune = 0 @@ -611,6 +636,7 @@ def sat(self, additional=None, includeIf=False, limit=0): return [] saved_state = self._sat_solver.save_state() if additional: + def preproc(eqs): def preproc_(cc): for c in cc: @@ -619,6 +645,7 @@ def preproc_(cc): yield c if c == TRUE: break + for cc in eqs: cc = tuple(preproc_(cc)) if not cc: @@ -626,6 +653,7 @@ def preproc_(cc): break if cc[-1] != TRUE: yield cc + additional = list(preproc(additional)) if additional: if not additional[-1]: @@ -644,13 +672,13 @@ def minimize(self, lits, coeffs, bestsol=None, trymax=False): largest active coefficient value, then we minimize the sum. 
""" if bestsol is None or len(bestsol) < self.m: - log.debug('Clauses added, recomputing solution') + log.debug("Clauses added, recomputing solution") bestsol = self.sat() if bestsol is None or self.unsat: - log.debug('Constraints are unsatisfiable') + log.debug("Constraints are unsatisfiable") return bestsol, sum(abs(c) for c in coeffs) + 1 if coeffs else 1 if not coeffs: - log.debug('Empty objective, trivial solution') + log.debug("Empty objective, trivial solution") return bestsol, 0 lits, coeffs, offset = self.LB_Preprocess(lits, coeffs) @@ -664,12 +692,12 @@ def sum_val(sol, objective_dict): lo = 0 try0 = 0 - for peak in ((True, False) if maxval > 1 else (False,)): + for peak in (True, False) if maxval > 1 else (False,): if peak: - log.trace('Beginning peak minimization') + log.trace("Beginning peak minimization") objval = peak_val else: - log.trace('Beginning sum minimization') + log.trace("Beginning sum minimization") objval = sum_val objective_dict = {a: c for c, a in zip(coeffs, lits)} @@ -689,7 +717,7 @@ def sum_val(sol, objective_dict): log.trace("Initial range (%d,%d)" % (lo, hi)) while True: if try0 is None: - mid = (lo+hi) // 2 + mid = (lo + hi) // 2 else: mid = try0 if peak: @@ -702,8 +730,10 @@ def sum_val(sol, objective_dict): self.Require(self.LinearBound, lits, coeffs, lo, mid, False) if log.isEnabledFor(DEBUG): - log.trace('Bisection attempt: (%d,%d), (%d+%d) clauses' % - (lo, mid, nz, self.get_clause_count() - nz)) + log.trace( + "Bisection attempt: (%d,%d), (%d+%d) clauses" + % (lo, mid, nz, self.get_clause_count() - nz) + ) newsol = self.sat() if newsol is None: lo = mid + 1 @@ -731,7 +761,7 @@ def sum_val(sol, objective_dict): self.unsat = False try0 = None - log.debug('Final %s objective: %d' % ('peak' if peak else 'sum', bestval)) + log.debug("Final %s objective: %d" % ("peak" if peak else "sum", bestval)) if bestval == 0: break elif peak: @@ -744,6 +774,6 @@ def sum_val(sol, objective_dict): try0 = sum_val(bestsol, objective_dict) lo = bestval else: - log.debug('New peak objective: %d' % peak_val(bestsol, objective_dict)) + log.debug("New peak objective: %d" % peak_val(bestsol, objective_dict)) return bestsol, bestval diff --git a/conda/common/_os/__init__.py b/conda/common/_os/__init__.py index d09685aa593..0c63f283f38 100644 --- a/conda/common/_os/__init__.py +++ b/conda/common/_os/__init__.py @@ -1,11 +1,9 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger from ..compat import on_win - if on_win: from .windows import get_free_space_on_windows as get_free_space from .windows import is_admin_on_windows as is_admin diff --git a/conda/common/_os/linux.py b/conda/common/_os/linux.py index a9b94fbb144..5f79a30ba21 100644 --- a/conda/common/_os/linux.py +++ b/conda/common/_os/linux.py @@ -1,11 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import sys from functools import lru_cache -from genericpath import exists from logging import getLogger from os import scandir -import sys +from genericpath import exists log = getLogger(__name__) @@ -16,7 +16,7 @@ def linux_get_libc_version(): If on linux, returns (libc_family, version), otherwise (None, None). 
""" - if not sys.platform.startswith('linux'): + if not sys.platform.startswith("linux"): return None, None from os import confstr, confstr_names, readlink @@ -31,9 +31,12 @@ def linux_get_libc_version(): val = None for k, v in confstr_names_fallback.items(): - assert k not in confstr_names or confstr_names[k] == v, ( - "confstr_names_fallback for %s is %s yet in confstr_names it is %s" - "" % (k, confstr_names_fallback[k], confstr_names[k]) + assert ( + k not in confstr_names or confstr_names[k] == v + ), "confstr_names_fallback for %s is %s yet in confstr_names it is %s" "" % ( + k, + confstr_names_fallback[k], + confstr_names[k], ) try: val = str(confstr(v)) @@ -45,26 +48,32 @@ def linux_get_libc_version(): if not val: # pragma: no cover # Weird, play it safe and assume glibc 2.5 - family, version = 'glibc', '2.5' - log.warning("Failed to detect libc family and version, assuming %s/%s", family, version) + family, version = "glibc", "2.5" + log.warning( + "Failed to detect libc family and version, assuming %s/%s", family, version + ) return family, version - family, version = val.split(' ') + family, version = val.split(" ") # NPTL is just the name of the threading library, even though the # version refers to that of uClibc. readlink() can help to try to # figure out a better name instead. - if family == 'NPTL': # pragma: no cover - for clib in (entry.path for entry in scandir("/lib") if entry.name[:7] == "libc.so"): + if family == "NPTL": # pragma: no cover + for clib in ( + entry.path for entry in scandir("/lib") if entry.name[:7] == "libc.so" + ): clib = readlink(clib) if exists(clib): - if clib.startswith('libuClibc'): - if version.startswith('0.'): - family = 'uClibc' + if clib.startswith("libuClibc"): + if version.startswith("0."): + family = "uClibc" else: - family = 'uClibc-ng' + family = "uClibc-ng" return family, version # This could be some other C library; it is unlikely though. 
- family = 'uClibc' - log.warning("Failed to detect non-glibc family, assuming %s (%s)", family, version) + family = "uClibc" + log.warning( + "Failed to detect non-glibc family, assuming %s (%s)", family, version + ) return family, version return family, version diff --git a/conda/common/_os/unix.py b/conda/common/_os/unix.py index 945179418f2..33b6a7830dd 100644 --- a/conda/common/_os/unix.py +++ b/conda/common/_os/unix.py @@ -1,11 +1,8 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import os - from logging import getLogger - log = getLogger(__name__) diff --git a/conda/common/_os/windows.py b/conda/common/_os/windows.py index c4cb02f2c3d..38d9f3a747b 100644 --- a/conda/common/_os/windows.py +++ b/conda/common/_os/windows.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from enum import IntEnum from logging import getLogger @@ -9,9 +8,23 @@ log = getLogger(__name__) if on_win: - from ctypes import (POINTER, Structure, WinError, byref, c_ulong, c_char_p, c_int, c_ulonglong, - c_void_p, c_wchar_p, pointer, sizeof, windll) - from ctypes.wintypes import HANDLE, BOOL, DWORD, HWND, HINSTANCE, HKEY + from ctypes import ( + POINTER, + Structure, + WinError, + byref, + c_char_p, + c_int, + c_ulong, + c_ulonglong, + c_void_p, + c_wchar_p, + pointer, + sizeof, + windll, + ) + from ctypes.wintypes import BOOL, DWORD, HANDLE, HINSTANCE, HKEY, HWND + PHANDLE = POINTER(HANDLE) PDWORD = POINTER(DWORD) SEE_MASK_NOCLOSEPROCESS = 0x00000040 @@ -22,31 +35,31 @@ WaitForSingleObject.restype = DWORD CloseHandle = windll.kernel32.CloseHandle - CloseHandle.argtypes = (HANDLE, ) + CloseHandle.argtypes = (HANDLE,) CloseHandle.restype = BOOL class ShellExecuteInfo(Structure): """ -https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-shellexecuteexa -https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/ns-shellapi-_shellexecuteinfoa + https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-shellexecuteexa + https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/ns-shellapi-_shellexecuteinfoa """ _fields_ = [ - ('cbSize', DWORD), - ('fMask', c_ulong), - ('hwnd', HWND), - ('lpVerb', c_char_p), - ('lpFile', c_char_p), - ('lpParameters', c_char_p), - ('lpDirectory', c_char_p), - ('nShow', c_int), - ('hInstApp', HINSTANCE), - ('lpIDList', c_void_p), - ('lpClass', c_char_p), - ('hKeyClass', HKEY), - ('dwHotKey', DWORD), - ('hIcon', HANDLE), - ('hProcess', HANDLE) + ("cbSize", DWORD), + ("fMask", c_ulong), + ("hwnd", HWND), + ("lpVerb", c_char_p), + ("lpFile", c_char_p), + ("lpParameters", c_char_p), + ("lpDirectory", c_char_p), + ("nShow", c_int), + ("hInstApp", HINSTANCE), + ("lpIDList", c_void_p), + ("lpClass", c_char_p), + ("hKeyClass", HKEY), + ("dwHotKey", DWORD), + ("hIcon", HANDLE), + ("hProcess", HANDLE), ] def __init__(self, **kwargs): @@ -59,7 +72,7 @@ def __init__(self, **kwargs): PShellExecuteInfo = POINTER(ShellExecuteInfo) ShellExecuteEx = windll.Shell32.ShellExecuteExA - ShellExecuteEx.argtypes = (PShellExecuteInfo, ) + ShellExecuteEx.argtypes = (PShellExecuteInfo,) ShellExecuteEx.restype = BOOL @@ -106,7 +119,7 @@ def get_free_space_on_windows(dir_name): ) result = free_bytes.value except Exception as e: - log.info('%r', e) + log.info("%r", e) return result @@ -116,7 +129,7 @@ def is_admin_on_windows(): # pragma: unix no cover try: result = windll.shell32.IsUserAnAdmin() != 0 except Exception as e: # pragma: no cover - log.info('%r', e) + log.info("%r", e) # result = 
'unknown' return result @@ -127,7 +140,7 @@ def _wait_and_close_handle(process_handle): WaitForSingleObject(process_handle, INFINITE) CloseHandle(process_handle) except Exception as e: - log.info('%r', e) + log.info("%r", e) def run_as_admin(args, wait=True): @@ -150,14 +163,14 @@ def run_as_admin(args, wait=True): - https://github.com/JustAMan/pyWinClobber/blob/master/win32elevate.py """ arg0 = args[0] - param_str = ' '.join(args[1:] if len(args) > 1 else ()) + param_str = " ".join(args[1:] if len(args) > 1 else ()) hprocess = None error_code = None try: execute_info = ShellExecuteInfo( fMask=SEE_MASK_NOCLOSEPROCESS, hwnd=None, - lpVerb='runas', + lpVerb="runas", lpFile=arg0, lpParameters=param_str, lpDirectory=None, @@ -168,7 +181,7 @@ def run_as_admin(args, wait=True): except Exception as e: successful = False error_code = e - log.info('%r', e) + log.info("%r", e) if not successful: error_code = WinError() diff --git a/conda/common/compat.py b/conda/common/compat.py index 7a89de0ca50..96fbd250917 100644 --- a/conda/common/compat.py +++ b/conda/common/compat.py @@ -42,6 +42,7 @@ def encode_arguments(arguments): from collections.abc import Iterable + def isiterable(obj): return not isinstance(obj, str) and isinstance(obj, Iterable) @@ -51,27 +52,40 @@ def isiterable(obj): # ############################# from collections import OrderedDict as odict # noqa: F401 - from io import open as io_open # NOQA -def open(file, mode='r', buffering=-1, encoding=None, errors=None, newline=None, closefd=True): - if 'b' in mode: - return io_open(file, str(mode), buffering=buffering, - errors=errors, newline=newline, closefd=closefd) +def open( + file, mode="r", buffering=-1, encoding=None, errors=None, newline=None, closefd=True +): + if "b" in mode: + return io_open( + file, + str(mode), + buffering=buffering, + errors=errors, + newline=newline, + closefd=closefd, + ) else: - return io_open(file, str(mode), buffering=buffering, - encoding=encoding or 'utf-8', errors=errors, newline=newline, - closefd=closefd) + return io_open( + file, + str(mode), + buffering=buffering, + encoding=encoding or "utf-8", + errors=errors, + newline=newline, + closefd=closefd, + ) def six_with_metaclass(meta, *bases): """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy # metaclass for one level of class instantiation that replaces itself with # the actual metaclass. 
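# Editorial illustration with hypothetical names (not part of this diff):
#     class Meta(type): ...
#     class Base: ...
#     class Widget(six_with_metaclass(Meta, Base)): ...
# The dummy metaclass below intercepts Widget's creation and re-dispatches
# it as Meta("Widget", (Base,), namespace), so type(Widget) is Meta under
# both Python 2 and Python 3 class syntax.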
class metaclass(type): - def __new__(cls, name, this_bases, d): return meta(name, bases, d) @@ -88,7 +102,7 @@ def __prepare__(cls, name, this_bases): def ensure_binary(value): try: - return value.encode('utf-8') + return value.encode("utf-8") except AttributeError: # pragma: no cover # AttributeError: '<>' object has no attribute 'encode' # In this case assume already binary type and do nothing @@ -97,7 +111,7 @@ def ensure_binary(value): def ensure_text_type(value) -> str: try: - return value.decode('utf-8') + return value.decode("utf-8") except AttributeError: # pragma: no cover # AttributeError: '<>' object has no attribute 'decode' # In this case assume already text_type and do nothing @@ -110,8 +124,8 @@ def ensure_text_type(value) -> str: from requests.packages.chardet import detect except ImportError: # pragma: no cover from pip._vendor.requests.packages.chardet import detect - encoding = detect(value).get('encoding') or 'utf-8' - return value.decode(encoding, errors='replace') + encoding = detect(value).get("encoding") or "utf-8" + return value.decode(encoding, errors="replace") except UnicodeEncodeError: # pragma: no cover # it's already str, so ignore? # not sure, surfaced with tests/models/test_match_spec.py test_tarball_match_specs @@ -121,7 +135,7 @@ def ensure_text_type(value) -> str: def ensure_unicode(value): try: - return value.decode('unicode_escape') + return value.decode("unicode_escape") except AttributeError: # pragma: no cover # AttributeError: '<>' object has no attribute 'decode' # In this case assume already unicode and do nothing @@ -139,7 +153,7 @@ def ensure_fs_path_encoding(value): def ensure_utf8_encoding(value): try: - return value.encode('utf-8') + return value.encode("utf-8") except AttributeError: return value except UnicodeEncodeError: diff --git a/conda/common/configuration.py b/conda/common/configuration.py index a647e7884ad..57892c62753 100644 --- a/conda/common/configuration.py +++ b/conda/common/configuration.py @@ -14,17 +14,17 @@ """ from __future__ import annotations +import copy +import sys from abc import ABCMeta, abstractmethod from collections import defaultdict from collections.abc import Mapping -import copy from enum import Enum, EnumMeta from itertools import chain from logging import getLogger from os import environ, scandir, stat from os.path import basename, expandvars from stat import S_IFDIR, S_IFMT, S_IFREG -import sys from typing import TYPE_CHECKING if TYPE_CHECKING: # pragma: no cover @@ -35,35 +35,37 @@ except ImportError: # pragma: no cover from .._vendor.boltons.setutils import IndexedSet -from .compat import isiterable, primitive_types -from .constants import NULL -from .path import expand -from .serialize import yaml_round_trip_load from .. 
import CondaError, CondaMultiError +from .._vendor.frozendict import frozendict from ..auxlib.collection import AttrDict, first, last, make_immutable from ..auxlib.exceptions import ThisShouldNeverHappenError from ..auxlib.type_coercion import TypeCoercionError, typify, typify_data_structure from ..common.iterators import unique -from .._vendor.frozendict import frozendict +from .compat import isiterable, primitive_types +from .constants import NULL +from .path import expand +from .serialize import yaml_round_trip_load try: - from ruamel.yaml.comments import CommentedSeq, CommentedMap + from ruamel.yaml.comments import CommentedMap, CommentedSeq from ruamel.yaml.reader import ReaderError from ruamel.yaml.scanner import ScannerError except ImportError: # pragma: no cover try: - from ruamel_yaml.comments import CommentedSeq, CommentedMap + from ruamel_yaml.comments import CommentedMap, CommentedSeq from ruamel_yaml.reader import ReaderError from ruamel_yaml.scanner import ScannerError except ImportError: - raise ImportError("No yaml library available. To proceed, conda install ruamel.yaml") + raise ImportError( + "No yaml library available. To proceed, conda install ruamel.yaml" + ) log = getLogger(__name__) EMPTY_MAP = frozendict() -def pretty_list(iterable, padding=' '): # TODO: move elsewhere in conda.common +def pretty_list(iterable, padding=" "): # TODO: move elsewhere in conda.common if not isiterable(iterable): iterable = [iterable] try: @@ -88,13 +90,12 @@ class ConfigurationError(CondaError): class ConfigurationLoadError(ConfigurationError): - def __init__(self, path, message_addition='', **kwargs): + def __init__(self, path, message_addition="", **kwargs): message = "Unable to load configuration file.\n path: %(path)s\n" super().__init__(message + message_addition, path=path, **kwargs) class ValidationError(ConfigurationError): - def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs): self.parameter_name = parameter_name self.parameter_value = parameter_value @@ -103,24 +104,36 @@ def __init__(self, parameter_name, parameter_value, source, msg=None, **kwargs): class MultipleKeysError(ValidationError): - def __init__(self, source, keys, preferred_key): self.source = source self.keys = keys - msg = ("Multiple aliased keys in file %s:\n" - "%s\n" - "Must declare only one. Prefer '%s'" % (source, pretty_list(keys), preferred_key)) + msg = ( + "Multiple aliased keys in file %s:\n" + "%s\n" + "Must declare only one. 
Prefer '%s'" + % (source, pretty_list(keys), preferred_key) + ) super().__init__(preferred_key, None, source, msg=msg) class InvalidTypeError(ValidationError): - def __init__(self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None): + def __init__( + self, parameter_name, parameter_value, source, wrong_type, valid_types, msg=None + ): self.wrong_type = wrong_type self.valid_types = valid_types if msg is None: - msg = ("Parameter %s = %r declared in %s has type %s.\n" - "Valid types:\n%s" % (parameter_name, parameter_value, - source, wrong_type, pretty_list(valid_types))) + msg = ( + "Parameter %s = %r declared in %s has type %s.\n" + "Valid types:\n%s" + % ( + parameter_name, + parameter_value, + source, + wrong_type, + pretty_list(valid_types), + ) + ) super().__init__(parameter_name, parameter_value, source, msg=msg) @@ -168,20 +181,19 @@ def from_value(cls, value): @classmethod def from_string(cls, string): try: - string = string.strip('!#') + string = string.strip("!#") return cls.from_value(string) except (ValueError, AttributeError): return None class RawParameter(metaclass=ABCMeta): - def __init__(self, source, key, raw_value): self.source = source self.key = key try: # ignore flake8 on this because it finds an error on py3 even though it is guarded - self._raw_value = unicode(raw_value.decode('utf-8')) # NOQA + self._raw_value = unicode(raw_value.decode("utf-8")) # NOQA except: self._raw_value = raw_value @@ -208,18 +220,20 @@ def make_raw_parameters(cls, source, from_map): class EnvRawParameter(RawParameter): - source = 'envvars' + source = "envvars" def value(self, parameter_obj): # note: this assumes that EnvRawParameters will only have flat configuration of either # primitive or sequential type - if hasattr(parameter_obj, 'string_delimiter'): + if hasattr(parameter_obj, "string_delimiter"): assert isinstance(self._raw_value, str) - string_delimiter = getattr(parameter_obj, 'string_delimiter') + string_delimiter = getattr(parameter_obj, "string_delimiter") # TODO: add stripping of !important, !top, and !bottom - return tuple(EnvRawParameter(EnvRawParameter.source, self.key, v) - for v in (vv.strip() for vv in self._raw_value.split(string_delimiter)) - if v) + return tuple( + EnvRawParameter(EnvRawParameter.source, self.key, v) + for v in (vv.strip() for vv in self._raw_value.split(string_delimiter)) + if v + ) else: return self.__important_split_value[0].strip() @@ -227,10 +241,10 @@ def keyflag(self): return ParameterFlag.final if len(self.__important_split_value) >= 2 else None def valueflags(self, parameter_obj): - if hasattr(parameter_obj, 'string_delimiter'): - string_delimiter = getattr(parameter_obj, 'string_delimiter') + if hasattr(parameter_obj, "string_delimiter"): + string_delimiter = getattr(parameter_obj, "string_delimiter") # TODO: add stripping of !important, !top, and !bottom - return tuple('' for _ in self._raw_value.split(string_delimiter)) + return tuple("" for _ in self._raw_value.split(string_delimiter)) else: return self.__important_split_value[0].strip() @@ -250,7 +264,7 @@ def make_raw_parameters(cls, appname): class ArgParseRawParameter(RawParameter): - source = 'cmd_line' + source = "cmd_line" def value(self, parameter_obj): # note: this assumes ArgParseRawParameter will only have flat configuration of either @@ -258,8 +272,9 @@ def value(self, parameter_obj): if isiterable(self._raw_value): children_values = [] for i in range(len(self._raw_value)): - children_values.append(ArgParseRawParameter( - self.source, self.key, 
self._raw_value[i])) + children_values.append( + ArgParseRawParameter(self.source, self.key, self._raw_value[i]) + ) return tuple(children_values) else: return make_immutable(self._raw_value) @@ -272,7 +287,9 @@ def valueflags(self, parameter_obj): @classmethod def make_raw_parameters(cls, args_from_argparse): - return super().make_raw_parameters(ArgParseRawParameter.source, args_from_argparse) + return super().make_raw_parameters( + ArgParseRawParameter.source, args_from_argparse + ) class YamlRawParameter(RawParameter): @@ -284,20 +301,29 @@ def __init__(self, source, key, raw_value, key_comment): if isinstance(self._raw_value, CommentedSeq): value_comments = self._get_yaml_list_comments(self._raw_value) - self._value_flags = tuple(ParameterFlag.from_string(s) for s in value_comments) + self._value_flags = tuple( + ParameterFlag.from_string(s) for s in value_comments + ) children_values = [] for i in range(len(self._raw_value)): - children_values.append(YamlRawParameter( - self.source, self.key, self._raw_value[i], value_comments[i])) + children_values.append( + YamlRawParameter( + self.source, self.key, self._raw_value[i], value_comments[i] + ) + ) self._value = tuple(children_values) elif isinstance(self._raw_value, CommentedMap): value_comments = self._get_yaml_map_comments(self._raw_value) self._value_flags = { - k: ParameterFlag.from_string(v) for k, v in value_comments.items() if v is not None + k: ParameterFlag.from_string(v) + for k, v in value_comments.items() + if v is not None } children_values = {} for k, v in self._raw_value.items(): - children_values[k] = YamlRawParameter(self.source, self.key, v, value_comments[k]) + children_values[k] = YamlRawParameter( + self.source, self.key, v, value_comments[k] + ) self._value = frozendict(children_values) elif isinstance(self._raw_value, primitive_types): self._value_flags = None @@ -358,7 +384,9 @@ def _get_yaml_map_comments(value): def make_raw_parameters(cls, source, from_map): if from_map: return { - key: cls(source, key, from_map[key], cls._get_yaml_key_comment(from_map, key)) + key: cls( + source, key, from_map[key], cls._get_yaml_key_comment(from_map, key) + ) for key in from_map } return EMPTY_MAP @@ -374,12 +402,14 @@ def make_raw_parameters_from_file(cls, filepath): filepath, " reason: invalid yaml at line %(line)s, column %(column)s", line=mark.line, - column=mark.column + column=mark.column, ) except ReaderError as err: - raise ConfigurationLoadError(filepath, - " reason: invalid yaml at position %(position)s", - position=err.position) + raise ConfigurationLoadError( + filepath, + " reason: invalid yaml at position %(position)s", + position=err.position, + ) return cls.make_raw_parameters(filepath, yaml_obj) or EMPTY_MAP @@ -399,15 +429,17 @@ def __init__(self, source, key, raw_value): elif isiterable(self._raw_value): children_values = [] for i in range(len(self._raw_value)): - children_values.append(DefaultValueRawParameter( - self.source, self.key, self._raw_value[i])) + children_values.append( + DefaultValueRawParameter(self.source, self.key, self._raw_value[i]) + ) self._value = tuple(children_values) elif isinstance(self._raw_value, ConfigurationObject): self._value = self._raw_value for attr_name, attr_value in vars(self._raw_value).items(): self._value.__setattr__( attr_name, - DefaultValueRawParameter(self.source, self.key, attr_value)) + DefaultValueRawParameter(self.source, self.key, attr_value), + ) elif isinstance(self._raw_value, Enum): self._value = self._raw_value elif isinstance(self._raw_value, 
primitive_types): @@ -440,12 +472,15 @@ def load_file_configs(search_path): # returns an ordered map of filepath and dict of raw parameter objects def _file_loader(fullpath): - assert fullpath.endswith((".yml", ".yaml")) or "condarc" in basename(fullpath), fullpath + assert fullpath.endswith((".yml", ".yaml")) or "condarc" in basename( + fullpath + ), fullpath yield fullpath, YamlRawParameter.make_raw_parameters_from_file(fullpath) def _dir_loader(fullpath): for filepath in sorted( - p for p in (entry.path for entry in scandir(fullpath)) + p + for p in (entry.path for entry in scandir(fullpath)) if p[-4:] == ".yml" or p[-5:] == ".yaml" ): yield filepath, YamlRawParameter.make_raw_parameters_from_file(filepath) @@ -465,9 +500,11 @@ def _get_st_mode(path): expanded_paths = tuple(expand(path) for path in search_path) stat_paths = (_get_st_mode(path) for path in expanded_paths) - load_paths = (_loader[st_mode](path) - for path, st_mode in zip(expanded_paths, stat_paths) - if st_mode is not None) + load_paths = ( + _loader[st_mode](path) + for path, st_mode in zip(expanded_paths, stat_paths) + if st_mode is not None + ) raw_data = dict(kv for kv in chain.from_iterable(load_paths)) return raw_data @@ -516,14 +553,19 @@ def collect_errors(self, instance, typed_value, source="<>"): """ errors = [] if not isinstance(typed_value, self._type): - errors.append(InvalidTypeError(self._name, typed_value, source, type(self.value), - self._type)) + errors.append( + InvalidTypeError( + self._name, typed_value, source, type(self.value), self._type + ) + ) elif self._validation is not None: result = self._validation(typed_value) if result is False: errors.append(ValidationError(self._name, typed_value, source)) elif isinstance(result, str): - errors.append(CustomValidationError(self._name, typed_value, source, result)) + errors.append( + CustomValidationError(self._name, typed_value, source, result) + ) return errors def expand(self): @@ -571,11 +613,15 @@ def typify(self, source): """ element_type = self._element_type try: - return LoadedParameter._typify_data_structure(self.value, source, element_type) + return LoadedParameter._typify_data_structure( + self.value, source, element_type + ) except TypeCoercionError as e: msg = str(e) if issubclass(element_type, Enum): - choices = ", ".join(map("'{}'".format, element_type.__members__.values())) + choices = ", ".join( + map("'{}'".format, element_type.__members__.values()) + ) msg += f"\nValid choices for {self._name}: {choices}" raise CustomValidationError(self._name, e.value, source, msg) @@ -590,8 +636,11 @@ def _typify_data_structure(value, source, type_hint=None): if isinstance(attr_value, LoadedParameter): value.__setattr__(attr_name, attr_value.typify(source)) return value - elif (isinstance(value, str) - and isinstance(type_hint, type) and issubclass(type_hint, str)): + elif ( + isinstance(value, str) + and isinstance(type_hint, type) + and issubclass(type_hint, str) + ): # This block is necessary because if we fall through to typify(), we end up calling # .strip() on the str, when sometimes we want to preserve preceding and trailing # whitespace. 
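_dir_loader above scans a directory for *.yml/*.yaml fragments in sorted order before parsing each one. A minimal sketch of the same pattern with the parsing step elided:

from os import scandir

def iter_yaml_fragments(config_dir):
    # sorted so fragments merge in a deterministic, filename-based order
    yield from sorted(
        entry.path
        for entry in scandir(config_dir)
        if entry.path.endswith((".yml", ".yaml"))
    )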
@@ -605,10 +654,12 @@ def _match_key_is_important(loaded_parameter): @staticmethod def _first_important_matches(matches): - idx = first(enumerate(matches), - lambda x: LoadedParameter._match_key_is_important(x[1]), - apply=lambda x: x[0]) - return matches if idx is None else matches[:idx+1] + idx = first( + enumerate(matches), + lambda x: LoadedParameter._match_key_is_important(x[1]), + apply=lambda x: x[0], + ) + return matches if idx is None else matches[: idx + 1] class PrimitiveLoadedParameter(LoadedParameter): @@ -619,7 +670,9 @@ class PrimitiveLoadedParameter(LoadedParameter): python 2 has long and unicode types. """ - def __init__(self, name, element_type, value, key_flag, value_flags, validation=None): + def __init__( + self, name, element_type, value, key_flag, value_flags, validation=None + ): """ Args: element_type (type or Tuple[type]): Type-validation of parameter's value. @@ -638,7 +691,9 @@ def __hash__(self): return hash(self.value) def merge(self, matches): - important_match = first(matches, LoadedParameter._match_key_is_important, default=None) + important_match = first( + matches, LoadedParameter._match_key_is_important, default=None + ) if important_match is not None: return important_match @@ -652,9 +707,12 @@ class MapLoadedParameter(LoadedParameter): """ LoadedParameter type that holds a map (i.e. dict) of LoadedParameters. """ + _type = frozendict - def __init__(self, name, value, element_type, key_flag, value_flags, validation=None): + def __init__( + self, name, value, element_type, key_flag, value_flags, validation=None + ): """ Args: value (Mapping): Map of string keys to LoadedParameter values. @@ -704,7 +762,9 @@ def merge(self, parameters: Sequence[MapLoadedParameter]) -> MapLoadedParameter: for parameter in parameters: for key, value in parameter.value.items(): grouped_map.setdefault(key, []).append(value) - merged_map = {key: values[0].merge(values) for key, values in grouped_map.items()} + merged_map = { + key: values[0].merge(values) for key, values in grouped_map.items() + } # update merged_map with final_map values merged_value = frozendict({**merged_map, **final_map}) @@ -716,16 +776,20 @@ def merge(self, parameters: Sequence[MapLoadedParameter]) -> MapLoadedParameter: self._element_type, self.key_flag, self.value_flags, - validation=self._validation) + validation=self._validation, + ) class SequenceLoadedParameter(LoadedParameter): """ LoadedParameter type that holds a sequence (i.e. list) of LoadedParameters. """ + _type = tuple - def __init__(self, name, value, element_type, key_flag, value_flags, validation=None): + def __init__( + self, name, value, element_type, key_flag, value_flags, validation=None + ): """ Args: value (Sequence): Sequence of LoadedParameter values. 
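A pure-Python restatement of the precedence rule _first_important_matches encodes above: keep matches up to and including the first one flagged important (#!final); if none is flagged, keep them all. The names here are illustrative:

def first_important_matches(matches, is_important):
    for i, match in enumerate(matches):
        if is_important(match):
            return matches[: i + 1]
    return matches

assert first_important_matches(["a", "b!", "c"], lambda m: m.endswith("!")) == ["a", "b!"]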
@@ -743,27 +807,38 @@ def collect_errors(self, instance, typed_value, source="<>"): return errors def merge(self, matches): - # get matches up to and including first important_match # but if no important_match, then all matches are important_matches - relevant_matches_and_values = tuple((match, match.value) for match in - LoadedParameter._first_important_matches(matches)) + relevant_matches_and_values = tuple( + (match, match.value) + for match in LoadedParameter._first_important_matches(matches) + ) for match, value in relevant_matches_and_values: if not isinstance(value, tuple): - raise InvalidTypeError(self.name, value, match.source, value.__class__.__name__, - self._type.__name__) + raise InvalidTypeError( + self.name, + value, + match.source, + value.__class__.__name__, + self._type.__name__, + ) # get individual lines from important_matches that were marked important # these will be prepended to the final result def get_marked_lines(match, marker): return ( - tuple(line for line, flag in zip(match.value, match.value_flags) if flag is marker) + tuple( + line + for line, flag in zip(match.value, match.value_flags) + if flag is marker + ) if match else () ) top_lines = chain.from_iterable( - get_marked_lines(m, ParameterFlag.top) for m, _ in relevant_matches_and_values + get_marked_lines(m, ParameterFlag.top) + for m, _ in relevant_matches_and_values ) # also get lines that were marked as bottom, but reverse the match order so that lines @@ -777,7 +852,9 @@ def get_marked_lines(match, marker): # now, concat all lines, while reversing the matches # reverse because elements closer to the end of search path take precedence - all_lines = chain.from_iterable(v for _, v in reversed(relevant_matches_and_values)) + all_lines = chain.from_iterable( + v for _, v in reversed(relevant_matches_and_values) + ) # stack top_lines + all_lines, then de-dupe top_deduped = tuple(unique((*top_lines, *all_lines))) @@ -786,7 +863,9 @@ def get_marked_lines(match, marker): # this gives us the reverse of the order we want, but almost there # NOTE: for a line value marked both top and bottom, the bottom marker will win out # for the top marker to win out, we'd need one additional de-dupe step - bottom_deduped = tuple(unique((*reversed(bottom_lines), *reversed(top_deduped)))) + bottom_deduped = tuple( + unique((*reversed(bottom_lines), *reversed(top_deduped))) + ) # just reverse, and we're good to go merged_values = tuple(reversed(bottom_deduped)) @@ -796,16 +875,20 @@ def get_marked_lines(match, marker): self._element_type, self.key_flag, self.value_flags, - validation=self._validation) + validation=self._validation, + ) class ObjectLoadedParameter(LoadedParameter): """ LoadedParameter type that holds a mapping (i.e. object) of LoadedParameters. """ + _type = object - def __init__(self, name, value, element_type, key_flag, value_flags, validation=None): + def __init__( + self, name, value, element_type, key_flag, value_flags, validation=None + ): """ Args: value (Sequence): Object with LoadedParameter fields. 
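SequenceLoadedParameter.merge is the subtlest merge in this hunk. A deliberately simplified sketch of its ordering contract (it ignores #!final truncation and the exact de-dupe passes, so treat it as illustration, not a faithful port): #!top lines float to the front, #!bottom lines sink to the end, and later (higher-precedence) sources win on duplicates.

def merge_sequences(matches):
    # matches: [(values, flags)] pairs, lowest-precedence source first,
    # where each flags[i] is "top", "bottom", or None
    def lines_marked(marker):
        return [
            v for values, flags in matches for v, f in zip(values, flags) if f == marker
        ]

    top, bottom = lines_marked("top"), lines_marked("bottom")
    # reversed: elements closer to the end of the search path take precedence
    combined = [v for values, _ in reversed(matches) for v in values]
    deduped = list(dict.fromkeys((*top, *combined)))
    return [v for v in deduped if v not in bottom] + list(dict.fromkeys(bottom))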
@@ -822,10 +905,14 @@ def collect_errors(self, instance, typed_value, source="<>"): if isinstance(self.value, ConfigurationObject): for key, value in vars(self.value).items(): if isinstance(value, LoadedParameter): - errors.extend(value.collect_errors(instance, typed_value[key], source)) + errors.extend( + value.collect_errors(instance, typed_value[key], source) + ) return errors - def merge(self, parameters: Sequence[ObjectLoadedParameter]) -> ObjectLoadedParameter: + def merge( + self, parameters: Sequence[ObjectLoadedParameter] + ) -> ObjectLoadedParameter: # get all parameters up to and including first important_match # but if no important_match, then all parameters are important_matches parameters = LoadedParameter._first_important_matches(parameters) @@ -848,7 +935,9 @@ def merge(self, parameters: Sequence[ObjectLoadedParameter]) -> ObjectLoadedPara for parameter in parameters: for key, value in vars(parameter.value).items(): grouped_map.setdefault(key, []).append(value) - merged_map = {key: values[0].merge(values) for key, values in grouped_map.items()} + merged_map = { + key: values[0].merge(values) for key, values in grouped_map.items() + } # update merged_map with final_map values merged_value = copy.deepcopy(self._element_type) @@ -862,13 +951,15 @@ def merge(self, parameters: Sequence[ObjectLoadedParameter]) -> ObjectLoadedPara self._element_type, self.key_flag, self.value_flags, - validation=self._validation) + validation=self._validation, + ) class ConfigurationObject: """ Dummy class to mark whether a Python object has config parameters within. """ + pass @@ -916,7 +1007,8 @@ def get_all_matches(self, name, names, instance): multikey_exceptions = [] for filepath, raw_parameters in instance.raw_data.items(): match, error = ParameterLoader.raw_parameters_from_single_source( - name, names, raw_parameters) + name, names, raw_parameters + ) if match is not None: matches.append(match) if error: @@ -943,7 +1035,9 @@ def typify(self, name, source, value): except TypeCoercionError as e: msg = str(e) if issubclass(element_type, Enum): - choices = ", ".join(map("'{}'".format, element_type.__members__.values())) + choices = ", ".join( + map("'{}'".format, element_type.__members__.values()) + ) msg += f"\nValid choices for {name}: {choices}" raise CustomValidationError(name, e.value, source, msg) @@ -974,13 +1068,15 @@ def load(self, name, match): match.value(self._element_type), match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) class MapParameter(Parameter): """ Parameter type for a Configuration class that holds a map (i.e. dict) of Parameters. 
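MapLoadedParameter.merge and ObjectLoadedParameter.merge reformatted above share the same per-key grouping step. A sketch with plain dicts standing in for LoadedParameter objects and "highest-precedence value wins" standing in for the recursive values[0].merge(values) call:

def merge_maps(parameters):
    grouped = {}
    for parameter in parameters:
        for key, value in parameter.items():
            grouped.setdefault(key, []).append(value)
    # stand-in for values[0].merge(values): highest-precedence value wins
    return {key: values[-1] for key, values in grouped.items()}

assert merge_maps([{"a": 1, "b": 2}, {"b": 3}]) == {"a": 1, "b": 3}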
""" + _type = frozendict def __init__(self, element_type, default=frozendict(), validation=None): @@ -1000,7 +1096,6 @@ def get_all_matches(self, name, names, instance): return matches, exceptions def load(self, name, match): - value = match.value(self._element_type) if value is None: return MapLoadedParameter( @@ -1009,11 +1104,13 @@ def load(self, name, match): self._element_type, match.keyflag(), frozendict(), - validation=self._validation) + validation=self._validation, + ) if not isinstance(value, Mapping): - raise InvalidTypeError(name, value, match.source, value.__class__.__name__, - self._type.__name__) + raise InvalidTypeError( + name, value, match.source, value.__class__.__name__, self._type.__name__ + ) loaded_map = {} for key, child_value in match.value(self._element_type).items(): @@ -1026,16 +1123,18 @@ def load(self, name, match): self._element_type, match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) class SequenceParameter(Parameter): """ Parameter type for a Configuration class that holds a sequence (i.e. list) of Parameters. """ + _type = tuple - def __init__(self, element_type, default=(), validation=None, string_delimiter=','): + def __init__(self, element_type, default=(), validation=None, string_delimiter=","): """ Args: element_type (Parameter): The Parameter type that is held in the sequence. @@ -1055,7 +1154,6 @@ def get_all_matches(self, name, names, instance): return matches, exceptions def load(self, name, match): - value = match.value(self) if value is None: return SequenceLoadedParameter( @@ -1068,8 +1166,9 @@ def load(self, name, match): ) if not isiterable(value): - raise InvalidTypeError(name, value, match.source, value.__class__.__name__, - self._type.__name__) + raise InvalidTypeError( + name, value, match.source, value.__class__.__name__, self._type.__name__ + ) loaded_sequence = [] for child_value in value: @@ -1082,13 +1181,15 @@ def load(self, name, match): self._element_type, match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) class ObjectParameter(Parameter): """ Parameter type for a Configuration class that holds an object with Parameter fields. 
""" + _type = object def __init__(self, element_type, default=ConfigurationObject(), validation=None): @@ -1107,7 +1208,6 @@ def get_all_matches(self, name, names, instance): return matches, exceptions def load(self, name, match): - value = match.value(self._element_type) if value is None: return ObjectLoadedParameter( @@ -1116,21 +1216,23 @@ def load(self, name, match): self._element_type, match.keyflag(), None, - validation=self._validation) + validation=self._validation, + ) if not isinstance(value, (Mapping, ConfigurationObject)): - raise InvalidTypeError(name, value, match.source, value.__class__.__name__, - self._type.__name__) + raise InvalidTypeError( + name, value, match.source, value.__class__.__name__, self._type.__name__ + ) # for a default object, extract out the instance variables if isinstance(value, ConfigurationObject): value = vars(value) - object_parameter_attrs = {attr_name: parameter_type - for attr_name, parameter_type - in vars(self._element_type).items() - if isinstance(parameter_type, Parameter) - and attr_name in value.keys()} + object_parameter_attrs = { + attr_name: parameter_type + for attr_name, parameter_type in vars(self._element_type).items() + if isinstance(parameter_type, Parameter) and attr_name in value.keys() + } # recursively load object fields loaded_attrs = {} @@ -1150,7 +1252,8 @@ def load(self, name, match): self._element_type, match.keyflag(), match.valueflags(self._element_type), - validation=self._validation) + validation=self._validation, + ) class ParameterLoader: @@ -1176,7 +1279,7 @@ def _set_name(self, name): # this is an explicit method, and not a descriptor/setter # it's meant to be called by the Configuration metaclass self._name = name - _names = frozenset(x for x in chain(self.aliases, (name, ))) + _names = frozenset(x for x in chain(self.aliases, (name,))) self._names = _names return name @@ -1226,7 +1329,8 @@ def __get__(self, instance, instance_type): def _raw_parameters_from_single_source(self, raw_parameters): return ParameterLoader.raw_parameters_from_single_source( - self.name, self.names, raw_parameters) + self.name, self.names, raw_parameters + ) @staticmethod def raw_parameters_from_single_source(name, names, raw_parameters): @@ -1241,10 +1345,12 @@ def raw_parameters_from_single_source(name, names, raw_parameters): return next(iter(matches.values())), None elif name in keys: return matches[name], MultipleKeysError( - raw_parameters[next(iter(keys))].source, keys, name) + raw_parameters[next(iter(keys))].source, keys, name + ) else: - return None, MultipleKeysError(raw_parameters[next(iter(keys))].source, - keys, name) + return None, MultipleKeysError( + raw_parameters[next(iter(keys))].source, keys, name + ) class ConfigurationType(type): @@ -1254,12 +1360,14 @@ def __init__(cls, name, bases, attr): super().__init__(name, bases, attr) # call _set_name for each parameter - cls.parameter_names = tuple(p._set_name(name) for name, p in cls.__dict__.items() - if isinstance(p, ParameterLoader)) + cls.parameter_names = tuple( + p._set_name(name) + for name, p in cls.__dict__.items() + if isinstance(p, ParameterLoader) + ) class Configuration(metaclass=ConfigurationType): - def __init__(self, search_path=(), app_name=None, argparse_args=None): # Currently, __init__ does a **full** disk reload of all files. # A future improvement would be to cache files that are already loaded. 
@@ -1282,29 +1390,35 @@ def _set_env_vars(self, app_name=None): self._app_name = app_name if not app_name: return self - self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters(app_name) + self.raw_data[EnvRawParameter.source] = EnvRawParameter.make_raw_parameters( + app_name + ) self._reset_cache() return self def _set_argparse_args(self, argparse_args): # the argparse_args we store internally in this class as self._argparse_args # will be a mapping type, not a non-`dict` object like argparse_args is natively - if hasattr(argparse_args, '__dict__'): + if hasattr(argparse_args, "__dict__"): # the argparse_args from argparse will be an object with a __dict__ attribute # and not a mapping type like this method will turn it into - self._argparse_args = AttrDict((k, v) for k, v, in vars(argparse_args).items() - if v is not NULL) + self._argparse_args = AttrDict( + (k, v) for k, v, in vars(argparse_args).items() if v is not NULL + ) elif not argparse_args: # argparse_args can be initialized as `None` self._argparse_args = AttrDict() else: # we're calling this method with argparse_args that are a mapping type, likely # already having been processed by this method before - self._argparse_args = AttrDict((k, v) for k, v, in argparse_args.items() - if v is not NULL) + self._argparse_args = AttrDict( + (k, v) for k, v, in argparse_args.items() if v is not NULL + ) source = ArgParseRawParameter.source - self.raw_data[source] = ArgParseRawParameter.make_raw_parameters(self._argparse_args) + self.raw_data[source] = ArgParseRawParameter.make_raw_parameters( + self._argparse_args + ) self._reset_cache() return self @@ -1330,7 +1444,9 @@ def check_source(self, source): raw_parameters = self.raw_data[source] for key in self.parameter_names: parameter = self.__class__.__dict__[key] - match, multikey_error = parameter._raw_parameters_from_single_source(raw_parameters) + match, multikey_error = parameter._raw_parameters_from_single_source( + raw_parameters + ) if multikey_error: validation_errors.append(multikey_error) @@ -1348,7 +1464,8 @@ def check_source(self, source): validation_errors.append(e) else: collected_errors = loaded_parameter.collect_errors( - self, typed_value, match.source) + self, typed_value, match.source + ) if collected_errors: validation_errors.extend(collected_errors) else: @@ -1360,8 +1477,11 @@ def check_source(self, source): return typed_values, validation_errors def validate_all(self): - validation_errors = list(chain.from_iterable(self.check_source(source)[1] - for source in self.raw_data)) + validation_errors = list( + chain.from_iterable( + self.check_source(source)[1] for source in self.raw_data + ) + ) raise_errors(validation_errors) self.validate_configuration() @@ -1370,12 +1490,14 @@ def _collect_validation_error(func, *args, **kwargs): try: func(*args, **kwargs) except ConfigurationError as e: - return e.errors if hasattr(e, 'errors') else e, + return (e.errors if hasattr(e, "errors") else e,) return () def validate_configuration(self): - errors = chain.from_iterable(Configuration._collect_validation_error(getattr, self, name) - for name in self.parameter_names) + errors = chain.from_iterable( + Configuration._collect_validation_error(getattr, self, name) + for name in self.parameter_names + ) post_errors = self.post_build_validation() raise_errors(tuple(chain.from_iterable((errors, post_errors)))) @@ -1393,16 +1515,16 @@ def collect_all(self): def describe_parameter(self, parameter_name): # TODO, in Parameter base class, rename element_type to 
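_set_argparse_args above drops Namespace attributes still set to the NULL sentinel, so unset CLI flags cannot shadow values from files or environment variables. A minimal sketch (NULL here is a stand-in for conda.auxlib.NULL):

import argparse

NULL = object()  # stand-in for conda.auxlib.NULL

def filter_argparse_args(args):
    # accept an argparse.Namespace (has __dict__) or an existing mapping
    mapping = getattr(args, "__dict__", args) or {}
    return {k: v for k, v in mapping.items() if v is not NULL}

ns = argparse.Namespace(json=True, quiet=NULL)
assert filter_argparse_args(ns) == {"json": True}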
value_type if parameter_name not in self.parameter_names: - parameter_name = '_' + parameter_name + parameter_name = "_" + parameter_name parameter_loader = self.__class__.__dict__[parameter_name] parameter = parameter_loader.type assert isinstance(parameter, Parameter) # dedupe leading underscore from name - name = parameter_loader.name.lstrip('_') + name = parameter_loader.name.lstrip("_") aliases = tuple(alias for alias in parameter_loader.aliases if alias != name) - description = self.get_descriptions().get(name, '') + description = self.get_descriptions().get(name, "") et = parameter._element_type if type(et) == EnumMeta: et = [et] @@ -1411,29 +1533,32 @@ def describe_parameter(self, parameter_name): if isinstance(parameter._element_type, Parameter): element_types = tuple( - _et.__class__.__name__.lower().replace("parameter", "") for _et in et) + _et.__class__.__name__.lower().replace("parameter", "") for _et in et + ) else: element_types = tuple(_et.__name__ for _et in et) details = { - 'parameter_type': parameter.__class__.__name__.lower().replace("parameter", ""), - 'name': name, - 'aliases': aliases, - 'element_types': element_types, - 'default_value': parameter.default.typify("<>"), - 'description': description.replace('\n', ' ').strip(), + "parameter_type": parameter.__class__.__name__.lower().replace( + "parameter", "" + ), + "name": name, + "aliases": aliases, + "element_types": element_types, + "default_value": parameter.default.typify("<>"), + "description": description.replace("\n", " ").strip(), } if isinstance(parameter, SequenceParameter): - details['string_delimiter'] = parameter.string_delimiter + details["string_delimiter"] = parameter.string_delimiter return details def list_parameters(self): - return tuple(sorted(name.lstrip('_') for name in self.parameter_names)) + return tuple(sorted(name.lstrip("_") for name in self.parameter_names)) def typify_parameter(self, parameter_name, value, source): # return a tuple with correct parameter name and typed-value if parameter_name not in self.parameter_names: - parameter_name = '_' + parameter_name + parameter_name = "_" + parameter_name parameter_loader = self.__class__.__dict__[parameter_name] parameter = parameter_loader.type assert isinstance(parameter, Parameter) diff --git a/conda/common/constants.py b/conda/common/constants.py index 7595c4c03ff..d1dfbac5d4d 100644 --- a/conda/common/constants.py +++ b/conda/common/constants.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from ..auxlib import NULL # Use this NULL object when needing to distinguish a value from None diff --git a/conda/common/decorators.py b/conda/common/decorators.py index d4745c41782..9a7d4e25be9 100644 --- a/conda/common/decorators.py +++ b/conda/common/decorators.py @@ -1,27 +1,29 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import os from functools import wraps def env_override(envvar_name, convert_empty_to_none=False): - '''Override the return value of the decorated function with an environment variable. + """Override the return value of the decorated function with an environment variable. If convert_empty_to_none is true, if the value of the environment variable is the empty string, a None value will be returned. 
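An illustrative (not captured from a real run) shape of the dict that describe_parameter() builds above for a sequence parameter; the values are hypothetical:

example_details = {
    "parameter_type": "sequence",
    "name": "channels",
    "aliases": ("channel",),
    "element_types": ("str",),
    "default_value": ("defaults",),
    "description": "Channels to search for packages.",
    "string_delimiter": ",",  # present only for SequenceParameter
}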
- ''' + """ + def decorator(func): @wraps(func) def wrapper(*args, **kwargs): value = os.environ.get(envvar_name, None) if value is not None: - if value == '' and convert_empty_to_none: + if value == "" and convert_empty_to_none: return None else: return value else: return func(*args, **kwargs) + return wrapper + return decorator diff --git a/conda/common/disk.py b/conda/common/disk.py index 374f8f46e10..d6fb7ad6030 100644 --- a/conda/common/disk.py +++ b/conda/common/disk.py @@ -1,10 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from contextlib import contextmanager from os import unlink + from ..auxlib.compat import Utf8NamedTemporaryFile + @contextmanager def temporary_content_in_file(content, suffix=""): # content returns temporary file path with contents diff --git a/conda/common/io.py b/conda/common/io.py index b2f65289238..8eb475ed711 100644 --- a/conda/common/io.py +++ b/conda/common/io.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json import logging import os @@ -63,7 +62,9 @@ def format(self, record): "%(levelname)s %(name)s:%(funcName)s(%(lineno)d): %(message)s" ) else: - _FORMATTER = Formatter("%(levelname)s %(name)s:%(funcName)s(%(lineno)d): %(message)s") + _FORMATTER = Formatter( + "%(levelname)s %(name)s:%(funcName)s(%(lineno)d): %(message)s" + ) def dashlist(iterable, indent=2): @@ -118,7 +119,6 @@ class CaptureTarget(Enum): @contextmanager def env_vars(var_map=None, callback=None, stack_callback=None): - if var_map is None: var_map = {} @@ -180,6 +180,7 @@ def captured(stdout=CaptureTarget.STRING, stderr=CaptureTarget.STRING): CapturedText: has attributes stdout, stderr which are either strings, None or the corresponding file-like function argument. """ + # NOTE: This function is not thread-safe. 
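A usage sketch for the env_override decorator above; MYAPP_DEBUG is a hypothetical variable name:

import os
from conda.common.decorators import env_override

@env_override("MYAPP_DEBUG", convert_empty_to_none=True)
def debug_mode():
    return "off"  # fallback when the variable is unset

os.environ["MYAPP_DEBUG"] = "on"
assert debug_mode() == "on"
os.environ["MYAPP_DEBUG"] = ""  # empty string + convert_empty_to_none -> None
assert debug_mode() is None
del os.environ["MYAPP_DEBUG"]
assert debug_mode() == "off"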
Using within multi-threading may cause spurious # behavior of not returning sys.stdout and sys.stderr back to their 'proper' state # """ @@ -299,7 +300,12 @@ def disable_logger(logger_name): @contextmanager def stderr_log_level(level, logger_name=None): logr = getLogger(logger_name) - _hndlrs, _lvl, _dsbld, _prpgt = logr.handlers, logr.level, logr.disabled, logr.propagate + _hndlrs, _lvl, _dsbld, _prpgt = ( + logr.handlers, + logr.level, + logr.disabled, + logr.propagate, + ) handler = StreamHandler(sys.stderr) handler.name = "stderr" handler.setLevel(level) @@ -317,7 +323,9 @@ def stderr_log_level(level, logger_name=None): logr.propagate = _prpgt -def attach_stderr_handler(level=WARN, logger_name=None, propagate=False, formatter=None): +def attach_stderr_handler( + level=WARN, logger_name=None, propagate=False, formatter=None +): # get old stderr logger logr = getLogger(logger_name) old_stderr_handler = next( @@ -393,7 +401,9 @@ def __init__(self, message, enabled=True, json=False, fail_message="failed\n"): self._spinner_thread = Thread(target=self._start_spinning) self._indicator_length = len(next(self.spinner_cycle)) + 1 self.fh = sys.stdout - self.show_spin = enabled and not json and hasattr(self.fh, "isatty") and self.fh.isatty() + self.show_spin = ( + enabled and not json and hasattr(self.fh, "isatty") and self.fh.isatty() + ) self.fail_message = fail_message def start(self): @@ -448,7 +458,9 @@ def get_lock(cls): cls._lock = RLock() return cls._lock - def __init__(self, description, enabled=True, json=False, position=None, leave=True): + def __init__( + self, description, enabled=True, json=False, position=None, leave=True + ): """ Args: description (str): @@ -513,7 +525,8 @@ def close(self): if self.enabled and self.json: with self.get_lock(): sys.stdout.write( - '{"fetch":"%s","finished":true,"maxval":1,"progress":1}\n\0' % self.description + '{"fetch":"%s","finished":true,"maxval":1,"progress":1}\n\0' + % self.description ) sys.stdout.flush() elif self.enabled: @@ -596,7 +609,9 @@ def submit(self, fn, *args, **kwargs): def get_instrumentation_record_file(): default_record_file = join("~", ".conda", "instrumentation-record.csv") - return expand(os.environ.get("CONDA_INSTRUMENTATION_RECORD_FILE", default_record_file)) + return expand( + os.environ.get("CONDA_INSTRUMENTATION_RECORD_FILE", default_record_file) + ) class time_recorder(ContextDecorator): # pragma: no cover diff --git a/conda/common/logic.py b/conda/common/logic.py index 6fdfacc3809..7b0929bba2f 100644 --- a/conda/common/logic.py +++ b/conda/common/logic.py @@ -29,8 +29,8 @@ from itertools import chain -from ._logic import Clauses as _Clauses, FALSE, TRUE - +from ._logic import FALSE, TRUE +from ._logic import Clauses as _Clauses # TODO: We may want to turn the user-facing {TRUE,FALSE} values into an Enum and # hide the _logic.{TRUE,FALSE} values as an implementation detail. @@ -84,7 +84,7 @@ def add_clauses(self, clauses): def name_var(self, m, name): self._check_literal(m) - nname = '!' + name + nname = "!" 
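A usage sketch for the captured() context manager whose docstring is touched above (note its own warning that it is not thread-safe):

from conda.common.io import captured

with captured() as c:
    print("hello")
assert c.stdout == "hello\n"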
+ name self.names[name] = m self.names[nname] = -m if m not in {TRUE, FALSE} and m not in self.indices: @@ -166,10 +166,14 @@ def Any(self, vals, polarity=None, name=None): return self._eval(self._clauses.Any, (list(vals),), (), polarity, name) def AtMostOne_NSQ(self, vals, polarity=None, name=None): - return self._eval(self._clauses.AtMostOne_NSQ, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.AtMostOne_NSQ, (list(vals),), (), polarity, name + ) def AtMostOne_BDD(self, vals, polarity=None, name=None): - return self._eval(self._clauses.AtMostOne_BDD, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.AtMostOne_BDD, (list(vals),), (), polarity, name + ) def AtMostOne(self, vals, polarity=None, name=None): vals = list(vals) @@ -181,10 +185,14 @@ def AtMostOne(self, vals, polarity=None, name=None): return self._eval(what, (vals,), (), polarity, name) def ExactlyOne_NSQ(self, vals, polarity=None, name=None): - return self._eval(self._clauses.ExactlyOne_NSQ, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.ExactlyOne_NSQ, (list(vals),), (), polarity, name + ) def ExactlyOne_BDD(self, vals, polarity=None, name=None): - return self._eval(self._clauses.ExactlyOne_BDD, (list(vals),), (), polarity, name) + return self._eval( + self._clauses.ExactlyOne_BDD, (list(vals),), (), polarity, name + ) def ExactlyOne(self, vals, polarity=None, name=None): vals = list(vals) @@ -203,7 +211,10 @@ def LinearBound(self, equation, lo, hi, preprocess=True, polarity=None, name=Non coefficients = list(equation.values()) return self._eval( self._clauses.LinearBound, - (named_literals,), (coefficients, lo, hi, preprocess), polarity, name, + (named_literals,), + (coefficients, lo, hi, preprocess), + polarity, + name, ) def sat(self, additional=None, includeIf=False, names=False, limit=0): @@ -220,11 +231,17 @@ def sat(self, additional=None, includeIf=False, names=False, limit=0): return set() if names else [] if additional: additional = (tuple(self.names.get(c, c) for c in cc) for cc in additional) - solution = self._clauses.sat(additional=additional, includeIf=includeIf, limit=limit) + solution = self._clauses.sat( + additional=additional, includeIf=includeIf, limit=limit + ) if solution is None: return None if names: - return {nm for nm in (self.indices.get(s) for s in solution) if nm and nm[0] != "!"} + return { + nm + for nm in (self.indices.get(s) for s in solution) + if nm and nm[0] != "!" 
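Background for the AtMostOne_NSQ/AtMostOne_BDD wrappers reformatted above: the simplest quadratic CNF encoding of at-most-one is one negative clause per pair of literals. An illustrative pure-Python encoder, not conda's implementation:

from itertools import combinations

def at_most_one_pairwise(literals):
    # at most one of `literals` is true  <=>  no pair is true together
    return [(-a, -b) for a, b in combinations(literals, 2)]

assert at_most_one_pairwise([1, 2, 3]) == [(-1, -2), (-1, -3), (-2, -3)]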
+ } return solution def itersolve(self, constraints=None, m=None): @@ -277,8 +294,17 @@ def minimal_unsatisfiable_subset(clauses, sat, explicit_specs): # we succeeded, so we'll add the spec to our future constraints working_set = set(explicit_specs) - for spec in (set(clauses) - working_set): - if sat(working_set | {spec, }, True) is None: + for spec in set(clauses) - working_set: + if ( + sat( + working_set + | { + spec, + }, + True, + ) + is None + ): found_conflicts.add(spec) else: # we succeeded, so we'll add the spec to our future constraints diff --git a/conda/common/path.py b/conda/common/path.py index ae8fe27a57a..ca9d5e75799 100644 --- a/conda/common/path.py +++ b/conda/common/path.py @@ -2,32 +2,40 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from functools import lru_cache, reduce -from itertools import accumulate, chain -from logging import getLogger import os -from os.path import abspath, basename, expanduser, expandvars, join, normcase, split, splitext import re import subprocess +from distutils.spawn import find_executable +from functools import lru_cache, reduce +from itertools import accumulate, chain +from logging import getLogger +from os.path import ( + abspath, + basename, + expanduser, + expandvars, + join, + normcase, + split, + splitext, +) from typing import Iterable, Sequence from urllib.parse import urlsplit -from .compat import on_win from .. import CondaError from ..deprecations import deprecated -from distutils.spawn import find_executable - +from .compat import on_win log = getLogger(__name__) PATH_MATCH_REGEX = ( - r"\./" # ./ - r"|\.\." # .. - r"|~" # ~ - r"|/" # / + r"\./" # ./ + r"|\.\." # .. + r"|~" # ~ + r"|/" # / r"|[a-zA-Z]:[/\\]" # drive letter, colon, forward or backslash - r"|\\\\" # windows UNC path - r"|//" # windows UNC path + r"|\\\\" # windows UNC path + r"|//" # windows UNC path ) # any other extension will be mangled by CondaSession.get() as it tries to find @@ -36,7 +44,7 @@ def is_path(value): - if '://' in value: + if "://" in value: return False return re.match(PATH_MATCH_REGEX, value) @@ -67,19 +75,22 @@ def url_to_path(url): if is_path(url): return url if not url.startswith("file://"): # pragma: no cover - raise CondaError("You can only turn absolute file: urls into paths (not %s)" % url) + raise CondaError( + "You can only turn absolute file: urls into paths (not %s)" % url + ) _, netloc, path, _, _ = urlsplit(url) from .url import percent_decode + path = percent_decode(path) - if netloc not in ('', 'localhost', '127.0.0.1', '::1'): - if not netloc.startswith('\\\\'): + if netloc not in ("", "localhost", "127.0.0.1", "::1"): + if not netloc.startswith("\\\\"): # The only net location potentially accessible is a Windows UNC path - netloc = '//' + netloc + netloc = "//" + netloc else: - netloc = '' + netloc = "" # Handle Windows drive letters if present - if re.match('^/([a-z])[:|]', path, re.I): - path = path[1] + ':' + path[3:] + if re.match("^/([a-z])[:|]", path, re.I): + path = path[1] + ":" + path[3:] return netloc + path @@ -104,6 +115,7 @@ def _process(x, y): if not tokenized_startswith(y, x): leaves.append(x) return y + last = reduce(_process, directories) if not leaves: @@ -111,7 +123,7 @@ def _process(x, y): elif not tokenized_startswith(last, leaves[-1]): leaves.append(last) - return tuple('/'.join(leaf) for leaf in leaves) + return tuple("/".join(leaf) for leaf in leaves) @deprecated.argument("23.3", "23.9", "already_split") @@ -126,33 +138,36 @@ def explode_directories(child_directories: 
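Doctest-style expectations for url_to_path above, following the drive-letter and netloc handling visible in the hunk:

from conda.common.path import url_to_path

assert url_to_path("file:///opt/pkgs/foo") == "/opt/pkgs/foo"
assert url_to_path("file:///C:/pkgs/foo") == "C:/pkgs/foo"  # drive letter restored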
Iterable[tuple[str, ...]]) -> set[str def pyc_path(py_path, python_major_minor_version): - ''' + """ This must not return backslashes on Windows as that will break tests and leads to an eventual need to make url_to_path return backslashes too and that may end up changing files on disc or to the result of comparisons with the contents of them. - ''' - pyver_string = python_major_minor_version.replace('.', '') - if pyver_string.startswith('2'): - return py_path + 'c' + """ + pyver_string = python_major_minor_version.replace(".", "") + if pyver_string.startswith("2"): + return py_path + "c" else: directory, py_file = split(py_path) basename_root, extension = splitext(py_file) - pyc_file = "__pycache__" + "/" + f"{basename_root}.cpython-{pyver_string}{extension}c" + pyc_file = ( + "__pycache__" + "/" + f"{basename_root}.cpython-{pyver_string}{extension}c" + ) return "{}{}{}".format(directory, "/", pyc_file) if directory else pyc_file def missing_pyc_files(python_major_minor_version, files): # returns a tuple of tuples, with the inner tuple being the .py file and the missing .pyc file - py_files = (f for f in files if f.endswith('.py')) - pyc_matches = ((py_file, pyc_path(py_file, python_major_minor_version)) - for py_file in py_files) + py_files = (f for f in files if f.endswith(".py")) + pyc_matches = ( + (py_file, pyc_path(py_file, python_major_minor_version)) for py_file in py_files + ) result = tuple(match for match in pyc_matches if match[1] not in files) return result def parse_entry_point_def(ep_definition): - cmd_mod, func = ep_definition.rsplit(':', 1) + cmd_mod, func = ep_definition.rsplit(":", 1) command, module = cmd_mod.rsplit("=", 1) command, module, func = command.strip(), module.strip(), func.strip() return command, module, func @@ -161,23 +176,24 @@ def parse_entry_point_def(ep_definition): def get_python_short_path(python_version=None): if on_win: return "python.exe" - if python_version and '.' not in python_version: - python_version = '.'.join(python_version) - return join("bin", "python%s" % (python_version or '')) + if python_version and "." not in python_version: + python_version = ".".join(python_version) + return join("bin", "python%s" % (python_version or "")) def get_python_site_packages_short_path(python_version): if python_version is None: return None elif on_win: - return 'Lib/site-packages' + return "Lib/site-packages" else: py_ver = get_major_minor_version(python_version) - return 'lib/python%s/site-packages' % py_ver + return "lib/python%s/site-packages" % py_ver _VERSION_REGEX = re.compile(r"[0-9]+\.[0-9]+") + def get_major_minor_version(string, with_dot=True): # returns None if not found, otherwise two digits as a string # should work for @@ -192,14 +208,14 @@ def get_major_minor_version(string, with_dot=True): start = len("python") if len(pythonstr) < start + 2: return None - maj_min = pythonstr[start], pythonstr[start+1:] + maj_min = pythonstr[start], pythonstr[start + 1 :] elif string.startswith("bin/python"): pythonstr = string.split("/")[1] start = len("python") if len(pythonstr) < start + 3: return None - assert pythonstr[start+1] == "." - maj_min = pythonstr[start], pythonstr[start+2:] + assert pythonstr[start + 1] == "." 
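A doctest-style check of pyc_path above for a Python 3 version string, matching the __pycache__ layout the function builds:

from conda.common.path import pyc_path

assert (
    pyc_path("lib/site-packages/foo.py", "3.10")
    == "lib/site-packages/__pycache__/foo.cpython-310.pyc"
)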
+ maj_min = pythonstr[start], pythonstr[start + 2 :] else: match = _VERSION_REGEX.match(string) if match: @@ -215,22 +231,22 @@ def get_major_minor_version(string, with_dot=True): def get_bin_directory_short_path(): - return 'Scripts' if on_win else 'bin' + return "Scripts" if on_win else "bin" def win_path_ok(path): - return path.replace('/', '\\') if on_win else path + return path.replace("/", "\\") if on_win else path def win_path_double_escape(path): - return path.replace('\\', '\\\\') if on_win else path + return path.replace("\\", "\\\\") if on_win else path def win_path_backout(path): # replace all backslashes except those escaping spaces # if we pass a file url, something like file://\\unc\path\on\win, make sure # we clean that up too - return re.sub(r"(\\(?! ))", r"/", path).replace(':////', '://') + return re.sub(r"(\\(?! ))", r"/", path).replace(":////", "://") def ensure_pad(name, pad="_"): @@ -288,16 +304,16 @@ def right_pad_os_sep(path): def split_filename(path_or_url): dn, fn = split(path_or_url) - return (dn or None, fn) if '.' in fn else (path_or_url, None) + return (dn or None, fn) if "." in fn else (path_or_url, None) def get_python_noarch_target_path(source_short_path, target_site_packages_short_path): - if source_short_path.startswith('site-packages/'): + if source_short_path.startswith("site-packages/"): sp_dir = target_site_packages_short_path - return source_short_path.replace('site-packages', sp_dir, 1) - elif source_short_path.startswith('python-scripts/'): + return source_short_path.replace("site-packages", sp_dir, 1) + elif source_short_path.startswith("python-scripts/"): bin_dir = get_bin_directory_short_path() - return source_short_path.replace('python-scripts', bin_dir, 1) + return source_short_path.replace("python-scripts", bin_dir, 1) else: return source_short_path @@ -309,21 +325,34 @@ def win_path_to_unix(path, root_prefix=""): # CYGPATH to e.g. /usr/bin/cygpath.exe (this will be translated to e.g. # (C:\msys32\usr\bin\cygpath.exe by MSYS2) to ensure this one is used. if not path: - return '' - bash = which('bash') + return "" + bash = which("bash") if bash: - cygpath = os.environ.get('CYGPATH', os.path.join(os.path.dirname(bash), 'cygpath.exe')) + cygpath = os.environ.get( + "CYGPATH", os.path.join(os.path.dirname(bash), "cygpath.exe") + ) else: - cygpath = os.environ.get('CYGPATH', 'cygpath.exe') + cygpath = os.environ.get("CYGPATH", "cygpath.exe") try: - path = subprocess.check_output([cygpath, '-up', path]).decode('ascii').split('\n')[0] + path = ( + subprocess.check_output([cygpath, "-up", path]) + .decode("ascii") + .split("\n")[0] + ) except Exception as e: - log.debug('%r' % e, exc_info=True) + log.debug("%r" % e, exc_info=True) + # Convert a path or ;-separated string of paths into a unix representation # Does not add cygdrive. 
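Likewise for get_python_noarch_target_path above, which remaps a noarch package's site-packages/ and python-scripts/ prefixes into the target environment:

from conda.common.path import get_python_noarch_target_path

assert (
    get_python_noarch_target_path("site-packages/foo.py", "lib/python3.10/site-packages")
    == "lib/python3.10/site-packages/foo.py"
)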
If you need that, set root_prefix to "/cygdrive" def _translation(found_path): # NOQA - found = found_path.group(1).replace("\\", "/").replace(":", "").replace("//", "/") + found = ( + found_path.group(1) + .replace("\\", "/") + .replace(":", "") + .replace("//", "/") + ) return root_prefix + "/" + found + path_re = '(?|]+[/\\\\]+)*[^:*?"<>|;/\\\\]+?(?![a-zA-Z]:))' # noqa path = re.sub(path_re, _translation, path).replace(";/", ":/") return path diff --git a/conda/common/pkg_formats/python.py b/conda/common/pkg_formats/python.py index 96aac7117b8..b281f728f26 100644 --- a/conda/common/pkg_formats/python.py +++ b/conda/common/pkg_formats/python.py @@ -1,6 +1,9 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import platform +import re +import sys +import warnings from collections import namedtuple from configparser import ConfigParser from csv import reader as csv_reader @@ -9,37 +12,38 @@ from io import StringIO from itertools import chain from logging import getLogger -from os import name as os_name, scandir, strerror +from os import name as os_name +from os import scandir, strerror from os.path import basename, dirname, isdir, isfile, join, lexists -import platform from posixpath import normpath as posix_normpath -import re -import sys -import warnings from conda.common.iterators import groupby_to_dict as groupby from ... import CondaError +from ..._vendor.frozendict import frozendict +from ...auxlib.decorators import memoizedproperty from ..compat import open from ..path import ( - get_python_site_packages_short_path, pyc_path, win_path_ok, get_major_minor_version, + get_major_minor_version, + get_python_site_packages_short_path, + pyc_path, + win_path_ok, ) -from ...auxlib.decorators import memoizedproperty -from ..._vendor.frozendict import frozendict log = getLogger(__name__) # TODO: complete this list PYPI_TO_CONDA = { - 'graphviz': 'python-graphviz', + "graphviz": "python-graphviz", } # TODO: complete this list PYPI_CONDA_DEPS = { - 'graphviz': ['graphviz'], # What version constraints? + "graphviz": ["graphviz"], # What version constraints? } # This regex can process requirement including or not including name. # This is useful for parsing, for example, `Python-Version` -PARTIAL_PYPI_SPEC_PATTERN = re.compile(r''' +PARTIAL_PYPI_SPEC_PATTERN = re.compile( + r""" # Text needs to be stripped and all extra spaces replaced by single spaces (?P^[A-Z0-9][A-Z0-9._-]*)? \s? @@ -47,9 +51,11 @@ \s? (?P\(? \s? ([\w\d<>=!~,\s\.\*+-]*) \s? \)? )? \s? -''', re.VERBOSE | re.IGNORECASE) -PY_FILE_RE = re.compile(r'^[^\t\n\r\f\v]+/site-packages/[^\t\n\r\f\v]+\.py$') -PySpec = namedtuple('PySpec', ['name', 'extras', 'constraints', 'marker', 'url']) +""", + re.VERBOSE | re.IGNORECASE, +) +PY_FILE_RE = re.compile(r"^[^\t\n\r\f\v]+/site-packages/[^\t\n\r\f\v]+\.py$") +PySpec = namedtuple("PySpec", ["name", "extras", "constraints", "marker", "url"]) class MetadataWarning(Warning): @@ -62,29 +68,36 @@ class PythonDistribution: """ Base object describing a python distribution based on path to anchor file. 
""" - MANIFEST_FILES = () # Only one is used, but many names available + + MANIFEST_FILES = () # Only one is used, but many names available REQUIRES_FILES = () # Only one is used, but many names available MANDATORY_FILES = () - ENTRY_POINTS_FILES = ('entry_points.txt', ) + ENTRY_POINTS_FILES = ("entry_points.txt",) @staticmethod def init(prefix_path, anchor_file, python_version): - if anchor_file.endswith('.egg-link'): + if anchor_file.endswith(".egg-link"): return PythonEggLinkDistribution(prefix_path, anchor_file, python_version) elif ".dist-info" in anchor_file: return PythonInstalledDistribution(prefix_path, anchor_file, python_version) elif anchor_file.endswith(".egg-info"): anchor_full_path = join(prefix_path, win_path_ok(anchor_file)) sp_reference = basename(anchor_file) - return PythonEggInfoDistribution(anchor_full_path, python_version, sp_reference) + return PythonEggInfoDistribution( + anchor_full_path, python_version, sp_reference + ) elif ".egg-info" in anchor_file: anchor_full_path = join(prefix_path, win_path_ok(dirname(anchor_file))) sp_reference = basename(dirname(anchor_file)) - return PythonEggInfoDistribution(anchor_full_path, python_version, sp_reference) + return PythonEggInfoDistribution( + anchor_full_path, python_version, sp_reference + ) elif ".egg" in anchor_file: anchor_full_path = join(prefix_path, win_path_ok(dirname(anchor_file))) sp_reference = basename(dirname(anchor_file)) - return PythonEggInfoDistribution(anchor_full_path, python_version, sp_reference) + return PythonEggInfoDistribution( + anchor_full_path, python_version, sp_reference + ) else: raise NotImplementedError() @@ -116,7 +129,11 @@ def _check_files(self): def _check_path_data(self, path, checksum, size): """Normalizes record data content and format.""" if checksum: - assert checksum.startswith('sha256='), (self._metadata_dir_full_path, path, checksum) + assert checksum.startswith("sha256="), ( + self._metadata_dir_full_path, + path, + checksum, + ) checksum = checksum[7:] else: checksum = None @@ -125,20 +142,20 @@ def _check_path_data(self, path, checksum, size): return path, checksum, size @staticmethod - def _parse_requires_file_data(data, global_section='__global__'): + def _parse_requires_file_data(data, global_section="__global__"): """ https://setuptools.readthedocs.io/en/latest/formats.html#requires-txt """ requires = {} - lines = [line.strip() for line in data.split('\n') if line] + lines = [line.strip() for line in data.split("\n") if line] - if lines and not (lines[0].startswith('[') and lines[0].endswith(']')): + if lines and not (lines[0].startswith("[") and lines[0].endswith("]")): # Add dummy section for unsectioned items lines = [f"[{global_section}]"] + lines # Parse sections for line in lines: - if line.startswith('[') and line.endswith(']'): + if line.startswith("[") and line.endswith("]"): section = line.strip()[1:-1] requires[section] = [] continue @@ -153,17 +170,17 @@ def _parse_requires_file_data(data, global_section='__global__'): if section == global_section: # This is the global section (same as dist_requires) reqs.extend(values) - elif section.startswith(':'): + elif section.startswith(":"): # The section is used as a marker # Example: ":python_version < '3'" - marker = section.replace(':', '; ') - new_values = [v+marker for v in values] + marker = section.replace(":", "; ") + new_values = [v + marker for v in values] reqs.extend(new_values) else: # The section is an extra, i.e. "docs", or "tests"... 
extras.append(section) marker = f'; extra == "{section}"' - new_values = [v+marker for v in values] + new_values = [v + marker for v in values] reqs.extend(new_values) return frozenset(reqs), extras @@ -231,7 +248,9 @@ def get_paths(self): if manifest_full_path: python_version = self.python_version sp_dir = get_python_site_packages_short_path(python_version) + "/" - prepend_metadata_dirname = basename(manifest_full_path) == "installed-files.txt" + prepend_metadata_dirname = ( + basename(manifest_full_path) == "installed-files.txt" + ) if prepend_metadata_dirname: path_prepender = basename(dirname(manifest_full_path)) + "/" else: @@ -245,8 +264,11 @@ def process_csv_row(reader): if len(row) == 3: checksum, size = row[1:] if checksum: - assert checksum.startswith('sha256='), (self._metadata_dir_full_path, - cleaned_path, checksum) + assert checksum.startswith("sha256="), ( + self._metadata_dir_full_path, + cleaned_path, + checksum, + ) checksum = checksum[7:] else: checksum = None @@ -258,7 +280,7 @@ def process_csv_row(reader): records.append((cleaned_path, checksum, size)) return tuple(records) - csv_delimiter = ',' + csv_delimiter = "," with open(manifest_full_path) as csvfile: record_reader = csv_reader(csvfile, delimiter=csv_delimiter) # format of each record is (path, checksum, size) @@ -267,10 +289,16 @@ def process_csv_row(reader): _pyc_path, _py_file_re = pyc_path, PY_FILE_RE py_ver_mm = get_major_minor_version(python_version, with_dot=False) - missing_pyc_files = (ff for ff in ( - _pyc_path(f, py_ver_mm) for f in files_set if _py_file_re.match(f) - ) if ff not in files_set) - records = sorted((*records, *((pf, None, None) for pf in missing_pyc_files))) + missing_pyc_files = ( + ff + for ff in ( + _pyc_path(f, py_ver_mm) for f in files_set if _py_file_re.match(f) + ) + if ff not in files_set + ) + records = sorted( + (*records, *((pf, None, None) for pf in missing_pyc_files)) + ) return records return [] @@ -313,11 +341,15 @@ def get_conda_dependencies(self): This includes normalizing fields, and evaluating environment markers. """ - python_spec = "python %s.*" % ".".join(self.python_version.split('.')[:2]) + python_spec = "python %s.*" % ".".join(self.python_version.split(".")[:2]) def pyspec_to_norm_req(pyspec): conda_name = pypi_name_to_conda_name(norm_package_name(pyspec.name)) - return f"{conda_name} {pyspec.constraints}" if pyspec.constraints else conda_name + return ( + f"{conda_name} {pyspec.constraints}" + if pyspec.constraints + else conda_name + ) reqs = self.get_dist_requirements() pyspecs = tuple(parse_specification(req) for req in reqs) @@ -332,7 +364,9 @@ def pyspec_to_norm_req(pyspec): for pyspec in chain.from_iterable(marker_groups.values()) if interpret(pyspec.marker, execution_context) ) - constrains = {pyspec_to_norm_req(pyspec) for pyspec in extras if pyspec.constraints} + constrains = { + pyspec_to_norm_req(pyspec) for pyspec in extras if pyspec.constraints + } depends.add(python_spec) return sorted(depends), sorted(constrains) @@ -375,9 +409,10 @@ class PythonInstalledDistribution(PythonDistribution): ----- - https://www.python.org/dev/peps/pep-0376/ """ - MANIFEST_FILES = ('RECORD',) + + MANIFEST_FILES = ("RECORD",) REQUIRES_FILES = () - MANDATORY_FILES = ('METADATA', ) + MANDATORY_FILES = ("METADATA",) # FIXME: Do this check? 
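A worked example for _parse_requires_file_data above: marker sections (leading ':') and extras sections become PEP 508-style suffixes. The input text is hypothetical:

from conda.common.pkg_formats.python import PythonDistribution

data = "requests\n[:python_version < '3']\nfutures\n[test]\npytest\n"
reqs, extras = PythonDistribution._parse_requires_file_data(data)
assert "requests" in reqs
assert "futures; python_version < '3'" in reqs
assert 'pytest; extra == "test"' in reqs
assert extras == ["test"]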
Disabled for tests where only Metadata file is stored # MANDATORY_FILES = ('METADATA', 'RECORD', 'INSTALLER') ENTRY_POINTS_FILES = () @@ -398,10 +433,11 @@ class PythonEggInfoDistribution(PythonDistribution): ----- - http://peak.telecommunity.com/DevCenter/EggFormats """ - MANIFEST_FILES = ('installed-files.txt', 'SOURCES', 'SOURCES.txt') - REQUIRES_FILES = ('requires.txt', 'depends.txt') + + MANIFEST_FILES = ("installed-files.txt", "SOURCES", "SOURCES.txt") + REQUIRES_FILES = ("requires.txt", "depends.txt") MANDATORY_FILES = () - ENTRY_POINTS_FILES = ('entry_points.txt', ) + ENTRY_POINTS_FILES = ("entry_points.txt",) def __init__(self, anchor_full_path, python_version, sp_reference): super().__init__(anchor_full_path, python_version) @@ -410,12 +446,12 @@ def __init__(self, anchor_full_path, python_version, sp_reference): @property def is_manageable(self): return ( - self.manifest_full_path and basename(self.manifest_full_path) == "installed-files.txt" + self.manifest_full_path + and basename(self.manifest_full_path) == "installed-files.txt" ) class PythonEggLinkDistribution(PythonEggInfoDistribution): - is_manageable = False def __init__(self, prefix_path, anchor_file, python_version): @@ -427,6 +463,7 @@ def __init__(self, prefix_path, anchor_file, python_version): # Python distribution/eggs metadata # ----------------------------------------------------------------------------- + class PythonDistributionMetadata: """ Object representing the metada of a Python Distribution given by anchor @@ -445,47 +482,52 @@ class PythonDistributionMetadata: - Metadata 1.1: https://www.python.org/dev/peps/pep-0314/ - Metadata 1.0: https://www.python.org/dev/peps/pep-0241/ """ - FILE_NAMES = ('METADATA', 'PKG-INFO') + + FILE_NAMES = ("METADATA", "PKG-INFO") # Python Packages Metadata 2.1 # ----------------------------------------------------------------------------- - SINGLE_USE_KEYS = frozendict(( - ('Metadata-Version', 'metadata_version'), - ('Name', 'name'), - ('Version', 'version'), - # ('Summary', 'summary'), - # ('Description', 'description'), - # ('Description-Content-Type', 'description_content_type'), - # ('Keywords', 'keywords'), - # ('Home-page', 'home_page'), - # ('Download-URL', 'download_url'), - # ('Author', 'author'), - # ('Author-email', 'author_email'), - # ('Maintainer', 'maintainer'), - # ('Maintainer-email', 'maintainer_email'), - ('License', 'license'), - # # Deprecated - # ('Obsoleted-By', 'obsoleted_by'), # Note: See 2.0 - # ('Private-Version', 'private_version'), # Note: See 2.0 - )) - MULTIPLE_USE_KEYS = frozendict(( - ('Platform', 'platform'), - ('Supported-Platform', 'supported_platform'), - # ('Classifier', 'classifier'), - ('Requires-Dist', 'requires_dist'), - ('Requires-External', 'requires_external'), - ('Requires-Python', 'requires_python'), - # ('Project-URL', 'project_url'), - ('Provides-Extra', 'provides_extra'), - # ('Provides-Dist', 'provides_dist'), - # ('Obsoletes-Dist', 'obsoletes_dist'), - # # Deprecated - # ('Extension', 'extension'), # Note: See 2.0 - # ('Obsoletes', 'obsoletes'), - # ('Provides', 'provides'), - ('Requires', 'requires'), - # ('Setup-Requires-Dist', 'setup_requires_dist'), # Note: See 2.0 - )) + SINGLE_USE_KEYS = frozendict( + ( + ("Metadata-Version", "metadata_version"), + ("Name", "name"), + ("Version", "version"), + # ('Summary', 'summary'), + # ('Description', 'description'), + # ('Description-Content-Type', 'description_content_type'), + # ('Keywords', 'keywords'), + # ('Home-page', 'home_page'), + # ('Download-URL', 'download_url'), + # 
('Author', 'author'), + # ('Author-email', 'author_email'), + # ('Maintainer', 'maintainer'), + # ('Maintainer-email', 'maintainer_email'), + ("License", "license"), + # # Deprecated + # ('Obsoleted-By', 'obsoleted_by'), # Note: See 2.0 + # ('Private-Version', 'private_version'), # Note: See 2.0 + ) + ) + MULTIPLE_USE_KEYS = frozendict( + ( + ("Platform", "platform"), + ("Supported-Platform", "supported_platform"), + # ('Classifier', 'classifier'), + ("Requires-Dist", "requires_dist"), + ("Requires-External", "requires_external"), + ("Requires-Python", "requires_python"), + # ('Project-URL', 'project_url'), + ("Provides-Extra", "provides_extra"), + # ('Provides-Dist', 'provides_dist'), + # ('Obsoletes-Dist', 'obsoletes_dist'), + # # Deprecated + # ('Extension', 'extension'), # Note: See 2.0 + # ('Obsoletes', 'obsoletes'), + # ('Provides', 'provides'), + ("Requires", "requires"), + # ('Setup-Requires-Dist', 'setup_requires_dist'), # Note: See 2.0 + ) + ) def __init__(self, path): metadata_path = self._process_path(path, self.FILE_NAMES) @@ -505,7 +547,7 @@ def _process_path(path, metadata_filenames): break elif isfile(path): # '.egg-info' file contains metadata directly - filenames = ['.egg-info'] + filenames = [".egg-info"] if metadata_filenames: filenames.extend(metadata_filenames) assert any(path.endswith(filename) for filename in filenames) @@ -545,7 +587,6 @@ def _message_to_dict(cls, message): if message: for key, value in message.items(): - if key in cls.MULTIPLE_USE_KEYS: new_key = cls.MULTIPLE_USE_KEYS[key] if new_key not in new_data: @@ -627,7 +668,7 @@ def get_dist_requirements(self): Return 'Requires' if 'Requires-Dist' is empty. """ - return self._get_multiple_data(['requires_dist', 'requires']) + return self._get_multiple_data(["requires_dist", "requires"]) def get_python_requirements(self): """ @@ -646,7 +687,7 @@ def get_python_requirements(self): frozenset(['>=3', '>2.6,!=3.0.*,!=3.1.*', '~=2.6', '>=3; sys_platform == "win32"']) """ - return self._get_multiple_data(['requires_python']) + return self._get_multiple_data(["requires_python"]) def get_external_requirements(self): """ @@ -674,7 +715,7 @@ def get_external_requirements(self): ------- frozenset(['C', 'libpng (>=1.5)', 'make; sys_platform != "win32"']) """ - return self._get_multiple_data(['requires_external']) + return self._get_multiple_data(["requires_external"]) def get_extra_provides(self): """ @@ -688,7 +729,7 @@ def get_extra_provides(self): ------- frozenset(['pdf', 'doc', 'test']) """ - return self._get_multiple_data(['provides_extra']) + return self._get_multiple_data(["provides_extra"]) def get_dist_provides(self): """ @@ -724,7 +765,7 @@ def get_dist_provides(self): Return `Provides` in case `Provides-Dist` is empty. """ - return self._get_multiple_data(['provides_dist', 'provides']) + return self._get_multiple_data(["provides_dist", "provides"]) def get_dist_obsolete(self): """ @@ -757,7 +798,7 @@ def get_dist_obsolete(self): - [1] https://packaging.python.org/specifications/version-specifiers/ """ - return self._get_multiple_data(['obsoletes_dist', 'obsoletes']) + return self._get_multiple_data(["obsoletes_dist", "obsoletes"]) def get_classifiers(self): """ @@ -771,38 +812,38 @@ def get_classifiers(self): frozenset(['Development Status :: 4 - Beta', "Environment :: Console (Text Based) ; os_name == "posix"]) """ - return self._get_multiple_data(['classifier']) + return self._get_multiple_data(["classifier"]) @property def name(self): - return self._data.get('name') # TODO: Check for existence? 
+ return self._data.get("name") # TODO: Check for existence? @property def version(self): - return self._data.get('version') # TODO: Check for existence? + return self._data.get("version") # TODO: Check for existence? # Helper functions # ----------------------------------------------------------------------------- def norm_package_name(name): - return name.replace('.', '-').replace('_', '-').lower() if name else '' + return name.replace(".", "-").replace("_", "-").lower() if name else "" def pypi_name_to_conda_name(pypi_name): - return PYPI_TO_CONDA.get(pypi_name, pypi_name) if pypi_name else '' + return PYPI_TO_CONDA.get(pypi_name, pypi_name) if pypi_name else "" def norm_package_version(version): """Normalize a version by removing extra spaces and parentheses.""" if version: - version = ','.join(v.strip() for v in version.split(',')).strip() + version = ",".join(v.strip() for v in version.split(",")).strip() - if version.startswith('(') and version.endswith(')'): + if version.startswith("(") and version.endswith(")"): version = version[1:-1] - version = ''.join(v for v in version if v.strip()) + version = "".join(v for v in version if v.strip()) else: - version = '' + version = "" return version @@ -811,7 +852,7 @@ def split_spec(spec, sep): """Split a spec by separator and return stripped start and end parts.""" parts = spec.rsplit(sep, 1) spec_start = parts[0].strip() - spec_end = '' + spec_end = "" if len(parts) == 2: spec_end = parts[-1].strip() return spec_start, spec_end @@ -830,32 +871,32 @@ def parse_specification(spec): PySpec(name='requests', extras=['security'], constraints='>=3.3.0', marker='foo >= 2.7 or bar == 1', url='']) """ - name, extras, const = spec, [], '' + name, extras, const = spec, [], "" # Remove excess whitespace - spec = ' '.join(p for p in spec.split(' ') if p).strip() + spec = " ".join(p for p in spec.split(" ") if p).strip() # Extract marker (Assumes that there can only be one ';' inside the spec) - spec, marker = split_spec(spec, ';') + spec, marker = split_spec(spec, ";") # Extract url (Assumes that there can only be one '@' inside the spec) - spec, url = split_spec(spec, '@') + spec, url = split_spec(spec, "@") # Find name, extras and constraints r = PARTIAL_PYPI_SPEC_PATTERN.match(spec) if r: # Normalize name - name = r.group('name') + name = r.group("name") name = norm_package_name(name) # TODO: Do we want this or not? # Clean extras - extras = r.group('extras') - extras = [e.strip() for e in extras.split(',') if e] if extras else [] + extras = r.group("extras") + extras = [e.strip() for e in extras.split(",") if e] if extras else [] # Clean constraints - const = r.group('constraints') - const = ''.join(c for c in const.split(' ') if c).strip() - if const.startswith('(') and const.endswith(')'): + const = r.group("constraints") + const = "".join(c for c in const.split(" ") if c).strip() + if const.startswith("(") and const.endswith(")"): # Remove parens const = const[1:-1] const = const.replace("-", ".") @@ -884,9 +925,9 @@ def get_site_packages_anchor_files(site_packages_path, site_packages_dir): # FIXME: If it is a .egg file, we need to unzip the content to be # able. Do this once and leave the directory, and remove the egg # (which is a zip file in disguise?) 
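# [editor's aside: illustrative sketch, not part of the patch] The string
# helpers reformatted earlier in this file's diff are easiest to review
# against concrete inputs; a doctest-style session using only names defined
# in this module (inputs are made up for illustration):
#
#     >>> norm_package_name("Zope.Interface")
#     'zope-interface'
#     >>> norm_package_version("(>=1.5, <2.0)")
#     '>=1.5,<2.0'
#     >>> split_spec("numpy >=1.21", ">=")
#     ('numpy', '1.21')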
- elif fname.endswith('.egg-link'): + elif fname.endswith(".egg-link"): anchor_file = f"{site_packages_dir}/{fname}" - elif fname.endswith('.pth'): + elif fname.endswith(".pth"): continue else: continue @@ -912,13 +953,14 @@ def get_dist_file_from_egg_link(egg_link_file, prefix_path): egg_link_contents = fh.readlines()[0].strip() except UnicodeDecodeError: from locale import getpreferredencoding + with open(egg_link_path, encoding=getpreferredencoding()) as fh: egg_link_contents = fh.readlines()[0].strip() if lexists(egg_link_contents): egg_info_fnames = tuple( - name for name in - (entry.name for entry in scandir(egg_link_contents)) + name + for name in (entry.name for entry in scandir(egg_link_contents)) if name[-9:] == ".egg-info" ) else: @@ -927,11 +969,13 @@ def get_dist_file_from_egg_link(egg_link_file, prefix_path): if egg_info_fnames: if len(egg_info_fnames) != 1: raise CondaError( - "Expected exactly one `egg-info` directory in '{}', via egg-link '{}'." - " Instead found: {}. These are often left over from " - "legacy operations that did not clean up correctly. Please " - "remove all but one of these.".format(egg_link_contents, - egg_link_file, egg_info_fnames)) + "Expected exactly one `egg-info` directory in '{}', via egg-link '{}'." + " Instead found: {}. These are often left over from " + "legacy operations that did not clean up correctly. Please " + "remove all but one of these.".format( + egg_link_contents, egg_link_file, egg_info_fnames + ) + ) egg_info_full_path = join(egg_link_contents, egg_info_fnames[0]) @@ -944,7 +988,6 @@ def get_dist_file_from_egg_link(egg_link_file, prefix_path): return egg_info_full_path - # See: https://bitbucket.org/pypa/distlib/src/34629e41cdff5c29429c7a4d1569ef5508b56929/distlib/util.py?at=default&fileviewer=file-view-default # NOQA # ------------------------------------------------------------------------------------------------ def parse_marker(marker_string): @@ -956,19 +999,20 @@ def parse_marker(marker_string): interpreted as a literal string, and a string not contained in quotes is a variable (such as os_name). 
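    [editor's aside: illustrative example, not in the original docstring]
    Assuming the grammar described above, parse_marker('os_name == "posix"
    and python_version >= "3.8"') returns a nested {'op', 'lhs', 'rhs'} tree
    (plus any unparsed remainder), roughly:
    {'op': 'and',
     'lhs': {'op': '==', 'lhs': 'os_name', 'rhs': '"posix"'},
     'rhs': {'op': '>=', 'lhs': 'python_version', 'rhs': '"3.8"'}}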
""" + def marker_var(remaining): # either identifier, or literal string m = IDENTIFIER.match(remaining) if m: result = m.groups()[0] - remaining = remaining[m.end():] + remaining = remaining[m.end() :] elif not remaining: - raise SyntaxError('unexpected end of input') + raise SyntaxError("unexpected end of input") else: q = remaining[0] - if q not in '\'"': - raise SyntaxError('invalid expression: %s' % remaining) - oq = '\'"'.replace(q, '') + if q not in "'\"": + raise SyntaxError("invalid expression: %s" % remaining) + oq = "'\"".replace(q, "") remaining = remaining[1:] parts = [q] while remaining: @@ -981,22 +1025,22 @@ def marker_var(remaining): else: m = STRING_CHUNK.match(remaining) if not m: - raise SyntaxError('error in string literal: %s' % remaining) + raise SyntaxError("error in string literal: %s" % remaining) parts.append(m.groups()[0]) - remaining = remaining[m.end():] + remaining = remaining[m.end() :] else: - s = ''.join(parts) - raise SyntaxError('unterminated string: %s' % s) + s = "".join(parts) + raise SyntaxError("unterminated string: %s" % s) parts.append(q) - result = ''.join(parts) + result = "".join(parts) remaining = remaining[1:].lstrip() # skip past closing quote return result, remaining def marker_expr(remaining): - if remaining and remaining[0] == '(': + if remaining and remaining[0] == "(": result, remaining = marker(remaining[1:].lstrip()) - if remaining[0] != ')': - raise SyntaxError('unterminated parenthesis: %s' % remaining) + if remaining[0] != ")": + raise SyntaxError("unterminated parenthesis: %s" % remaining) remaining = remaining[1:].lstrip() else: lhs, remaining = marker_var(remaining) @@ -1005,9 +1049,9 @@ def marker_expr(remaining): if not m: break op = m.groups()[0] - remaining = remaining[m.end():] + remaining = remaining[m.end() :] rhs, remaining = marker_var(remaining) - lhs = {'op': op, 'lhs': lhs, 'rhs': rhs} + lhs = {"op": op, "lhs": lhs, "rhs": rhs} result = lhs return result, remaining @@ -1017,9 +1061,9 @@ def marker_and(remaining): m = AND.match(remaining) if not m: break - remaining = remaining[m.end():] + remaining = remaining[m.end() :] rhs, remaining = marker_expr(remaining) - lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs} + lhs = {"op": "and", "lhs": lhs, "rhs": rhs} return lhs, remaining def marker(remaining): @@ -1028,9 +1072,9 @@ def marker(remaining): m = OR.match(remaining) if not m: break - remaining = remaining[m.end():] + remaining = remaining[m.end() :] rhs, remaining = marker_and(remaining) - lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs} + lhs = {"op": "or", "lhs": lhs, "rhs": rhs} return lhs, remaining return marker(marker_string) @@ -1043,20 +1087,20 @@ def marker(remaining): # # Requirement parsing code as per PEP 508 # -IDENTIFIER = re.compile(r'^([\w\.-]+)\s*') -VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*') -COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*') -MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*') -OR = re.compile(r'^or\b\s*') -AND = re.compile(r'^and\b\s*') -NON_SPACE = re.compile(r'(\S+)\s*') -STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)') +IDENTIFIER = re.compile(r"^([\w\.-]+)\s*") +VERSION_IDENTIFIER = re.compile(r"^([\w\.*+-]+)\s*") +COMPARE_OP = re.compile(r"^(<=?|>=?|={2,3}|[~!]=)\s*") +MARKER_OP = re.compile(r"^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*") +OR = re.compile(r"^or\b\s*") +AND = re.compile(r"^and\b\s*") +NON_SPACE = re.compile(r"(\S+)\s*") +STRING_CHUNK = re.compile(r"([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)") def _is_literal(o): if not 
isinstance(o, str) or not o: return False - return o[0] in '\'"' + return o[0] in "'\"" class Evaluator: @@ -1065,18 +1109,18 @@ class Evaluator: """ operations = { - '==': lambda x, y: x == y, - '===': lambda x, y: x == y, - '~=': lambda x, y: x == y or x > y, - '!=': lambda x, y: x != y, - '<': lambda x, y: x < y, - '<=': lambda x, y: x == y or x < y, - '>': lambda x, y: x > y, - '>=': lambda x, y: x == y or x > y, - 'and': lambda x, y: x and y, - 'or': lambda x, y: x or y, - 'in': lambda x, y: x in y, - 'not in': lambda x, y: x not in y, + "==": lambda x, y: x == y, + "===": lambda x, y: x == y, + "~=": lambda x, y: x == y or x > y, + "!=": lambda x, y: x != y, + "<": lambda x, y: x < y, + "<=": lambda x, y: x == y or x < y, + ">": lambda x, y: x > y, + ">=": lambda x, y: x == y or x > y, + "and": lambda x, y: x and y, + "or": lambda x, y: x or y, + "in": lambda x, y: x in y, + "not in": lambda x, y: x not in y, } def evaluate(self, expr, context): @@ -1085,15 +1129,15 @@ def evaluate(self, expr, context): function in the specified context. """ if isinstance(expr, str): - if expr[0] in '\'"': + if expr[0] in "'\"": result = expr[1:-1] else: if expr not in context: - raise SyntaxError('unknown variable: %s' % expr) + raise SyntaxError("unknown variable: %s" % expr) result = context[expr] else: assert isinstance(expr, dict) - op = expr['op'] + op = expr["op"] if op not in self.operations: raise NotImplementedError("op not implemented: %s" % op) elhs = expr["lhs"] @@ -1125,38 +1169,38 @@ def get_default_marker_context(): def format_full_version(info): version = f"{info.major}.{info.minor}.{info.micro}" kind = info.releaselevel - if kind != 'final': + if kind != "final": version += kind[0] + str(info.serial) return version - if hasattr(sys, 'implementation'): + if hasattr(sys, "implementation"): implementation_version = format_full_version(sys.implementation.version) implementation_name = sys.implementation.name else: - implementation_version = '0' - implementation_name = '' + implementation_version = "0" + implementation_name = "" # TODO: we can't use this result = { # See: https://www.python.org/dev/peps/pep-0508/#environment-markers - 'implementation_name': implementation_name, - 'implementation_version': implementation_version, - 'os_name': os_name, - 'platform_machine': platform.machine(), - 'platform_python_implementation': platform.python_implementation(), - 'platform_release': platform.release(), - 'platform_system': platform.system(), - 'platform_version': platform.version(), - 'python_full_version': platform.python_version(), - 'python_version': '.'.join(platform.python_version().split('.')[:2]), - 'sys_platform': sys.platform, + "implementation_name": implementation_name, + "implementation_version": implementation_version, + "os_name": os_name, + "platform_machine": platform.machine(), + "platform_python_implementation": platform.python_implementation(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "python_version": ".".join(platform.python_version().split(".")[:2]), + "sys_platform": sys.platform, # See: https://www.python.org/dev/peps/pep-0345/#environment-markers - 'os.name': os_name, - 'platform.python_implementation': platform.python_implementation(), - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'sys.platform': sys.platform, - 'extra': '', + "os.name": os_name, + "platform.python_implementation": 
platform.python_implementation(), + "platform.version": platform.version(), + "platform.machine": platform.machine(), + "sys.platform": sys.platform, + "extra": "", } return result diff --git a/conda/common/serialize.py b/conda/common/serialize.py index d1557573dda..0ff4c60e4fd 100644 --- a/conda/common/serialize.py +++ b/conda/common/serialize.py @@ -1,9 +1,8 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from io import StringIO import functools import json +from io import StringIO from logging import getLogger from ..auxlib.entity import EntityEncoder @@ -14,7 +13,9 @@ try: import ruamel_yaml as yaml except ImportError: - raise ImportError("No yaml library available. To proceed, conda install ruamel.yaml") + raise ImportError( + "No yaml library available. To proceed, conda install ruamel.yaml" + ) log = getLogger(__name__) @@ -72,4 +73,6 @@ def json_load(string): def json_dump(object): - return json.dumps(object, indent=2, sort_keys=True, separators=(",", ": "), cls=EntityEncoder) + return json.dumps( + object, indent=2, sort_keys=True, separators=(",", ": "), cls=EntityEncoder + ) diff --git a/conda/common/signals.py b/conda/common/signals.py index f477f54948f..9a61f61f262 100644 --- a/conda/common/signals.py +++ b/conda/common/signals.py @@ -1,21 +1,21 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from contextlib import contextmanager -from logging import getLogger import signal import threading +from contextlib import contextmanager +from logging import getLogger log = getLogger(__name__) INTERRUPT_SIGNALS = ( - 'SIGABRT', - 'SIGINT', - 'SIGTERM', - 'SIGQUIT', - 'SIGBREAK', + "SIGABRT", + "SIGINT", + "SIGTERM", + "SIGQUIT", + "SIGBREAK", ) + def get_signal_name(signum): """ Examples: @@ -24,9 +24,14 @@ def get_signal_name(signum): 'SIGINT' """ - return next((k for k, v in signal.__dict__.items() - if v == signum and k.startswith('SIG') and not k.startswith('SIG_')), - None) + return next( + ( + k + for k, v in signal.__dict__.items() + if v == signum and k.startswith("SIG") and not k.startswith("SIG_") + ), + None, + ) @contextmanager @@ -44,7 +49,7 @@ def signal_handler(handler): _thread_local.previous_handlers.append((sig, prev_handler)) except ValueError as e: # pragma: no cover # ValueError: signal only works in main thread - log.debug('%r', e) + log.debug("%r", e) try: yield finally: diff --git a/conda/common/toposort.py b/conda/common/toposort.py index 998124110b5..be7ed49e223 100644 --- a/conda/common/toposort.py +++ b/conda/common/toposort.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from functools import reduce as _reduce from logging import getLogger @@ -9,11 +8,10 @@ def _toposort(data): """Dependencies are expressed as a dictionary whose keys are items -and whose values are a set of dependent items. Output is a list of -sets in topological order. The first set consists of items with no -dependences, each subsequent set consists of items that depend upon -items in the preceding sets. -""" + and whose values are a set of dependent items. Output is a list of + sets in topological order. The first set consists of items with no + dependencies, each subsequent set consists of items that depend upon + items in the preceding sets.""" # Special case empty input. if len(data) == 0: @@ -27,7 +25,6 @@ def _toposort(data): # Add empty dependencies where needed.
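# [editor's aside: illustrative sketch, not part of the patch] Expected
# behaviour of the public toposort() defined below in this module, on a toy
# graph where each item maps to the set of items it depends on (dependencies
# come first in the output; the python/pip special case aside):
#
#     >>> toposort({"a": {"b", "c"}, "b": {"c"}, "c": set()}, safe=False)
#     ['c', 'b', 'a']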
data.update({item: set() for item in extra_items_in_deps}) while True: - ordered = sorted({item for item, dep in data.items() if len(dep) == 0}) if not ordered: break @@ -41,8 +38,10 @@ def _toposort(data): if len(data) != 0: from ..exceptions import CondaValueError - msg = 'Cyclic dependencies exist among these items: {}' - raise CondaValueError(msg.format(' -> '.join(repr(x) for x in data.keys()))) + + msg = "Cyclic dependencies exist among these items: {}" + raise CondaValueError(msg.format(" -> ".join(repr(x) for x in data.keys()))) + def pop_key(data): """ @@ -59,13 +58,13 @@ def pop_key(data): return key + def _safe_toposort(data): """Dependencies are expressed as a dictionary whose keys are items -and whose values are a set of dependent items. Output is a list of -sets in topological order. The first set consists of items with no -dependencies, each subsequent set consists of items that depend upon -items in the preceding sets. -""" + and whose values are a set of dependent items. Output is a list of + sets in topological order. The first set consists of items with no + dependencies, each subsequent set consists of items that depend upon + items in the preceding sets.""" # Special case empty input. if len(data) == 0: @@ -93,10 +92,9 @@ def _safe_toposort(data): def toposort(data, safe=True): - data = {k: set(v) for k, v in data.items()} - if 'python' in data: + if "python" in data: # Special case: Remove circular dependency between python and pip, # to ensure python is always installed before anything that needs it. # For more details: @@ -104,7 +102,7 @@ def toposort(data, safe=True): # - https://github.com/conda/conda/pull/1154 # - https://github.com/conda/conda-build/issues/401 # - https://github.com/conda/conda/pull/1614 - data['python'].discard('pip') + data["python"].discard("pip") if safe: return list(_safe_toposort(data)) diff --git a/conda/common/url.py b/conda/common/url.py index eca0065c957..a3a193ea900 100644 --- a/conda/common/url.py +++ b/conda/common/url.py @@ -1,66 +1,74 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import codecs +import re +import socket from collections import namedtuple from functools import lru_cache from getpass import getpass from os.path import abspath, expanduser -import re -import socket - -from ..deprecations import deprecated -from .compat import on_win -from .path import split_filename, strip_pkg_extension - -from urllib.parse import ( # NOQA +from urllib.parse import ( # noqa: F401 + ParseResult, quote, quote_plus, unquote, unquote_plus, - urlparse as _urlparse, - urlunparse as _urlunparse, - ParseResult, ) +from urllib.parse import urlparse as _urlparse +from urllib.parse import urlunparse as _urlunparse # noqa: F401 + +from ..deprecations import deprecated +from .compat import on_win +from .path import split_filename, strip_pkg_extension def hex_octal_to_int(ho): ho = ord(ho.upper()) - o0 = ord('0') - o9 = ord('9') - oA = ord('A') - oF = ord('F') - res = ho - o0 if ho >= o0 and ho <= o9 else (ho - oA + 10) if ho >= oA and ho <= oF else None + o0 = ord("0") + o9 = ord("9") + oA = ord("A") + oF = ord("F") + res = ( + ho - o0 + if ho >= o0 and ho <= o9 + else (ho - oA + 10) + if ho >= oA and ho <= oF + else None + ) return res @lru_cache(maxsize=None) def percent_decode(path): - # This is not fast so avoid when we can. 
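# [editor's aside: illustrative sketch, not part of the patch]
# percent_decode() resolves %XX escapes and returns other strings unchanged:
#
#     >>> percent_decode("a%20b%2Fc")
#     'a b/c'
#     >>> percent_decode("no-escapes-here")
#     'no-escapes-here'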
- if '%' not in path: + if "%" not in path: return path ranges = [] - for m in re.finditer(r'(%[0-9A-F]{2})', path, flags=re.IGNORECASE): + for m in re.finditer(r"(%[0-9A-F]{2})", path, flags=re.IGNORECASE): ranges.append((m.start(), m.end())) if not len(ranges): return path # Sorry! Correctness is more important than speed at the moment. # Should use a map + lambda eventually. - result = b'' + result = b"" skips = 0 for i, c in enumerate(path): if skips > 0: skips -= 1 continue - c = c.encode('ascii') + c = c.encode("ascii") emit = c - if c == b'%': + if c == b"%": for r in ranges: if i == r[0]: import struct + emit = struct.pack( - "B", hex_octal_to_int(path[i+1])*16 + hex_octal_to_int(path[i+2])) + "B", + hex_octal_to_int(path[i + 1]) * 16 + + hex_octal_to_int(path[i + 2]), + ) skips = 2 break if emit: @@ -68,7 +76,7 @@ def percent_decode(path): return codecs.utf_8_decode(result)[0] -file_scheme = 'file://' +file_scheme = "file://" # Keeping this around for now, need to combine with the same function in conda/common/path.py """ @@ -85,14 +93,16 @@ def url_to_path(url): @lru_cache(maxsize=None) def path_to_url(path): if not path: - raise ValueError('Not allowed: %r' % path) + raise ValueError("Not allowed: %r" % path) if path.startswith(file_scheme): try: - path.decode('ascii') + path.decode("ascii") except UnicodeDecodeError: - raise ValueError('Non-ascii not allowed for things claiming to be URLs: %r' % path) + raise ValueError( + "Non-ascii not allowed for things claiming to be URLs: %r" % path + ) return path - path = abspath(expanduser(path)).replace('\\', '/') + path = abspath(expanduser(path)).replace("\\", "/") # We do not use urljoin here because we want to take our own # *very* explicit control of how paths get encoded into URLs. # We should not follow any RFCs on how to encode and decode @@ -107,17 +117,20 @@ def path_to_url(path): # for `file://` URLs. # percent_encode_chars = "!'()*-._/\\:" - percent_encode = lambda s: "".join(["%%%02X" % ord(c), c] - [c < "{" and c.isalnum() or c in percent_encode_chars] - for c in s) + percent_encode = lambda s: "".join( + ["%%%02X" % ord(c), c][c < "{" and c.isalnum() or c in percent_encode_chars] + for c in s + ) if any(ord(char) >= 128 for char in path): - path = percent_encode(path.decode('unicode-escape') - if hasattr(path, 'decode') - else bytes(path, "utf-8").decode('unicode-escape')) + path = percent_encode( + path.decode("unicode-escape") + if hasattr(path, "decode") + else bytes(path, "utf-8").decode("unicode-escape") + ) # https://blogs.msdn.microsoft.com/ie/2006/12/06/file-uris-in-windows/ - if len(path) > 1 and path[1] == ':': - path = file_scheme + '/' + path + if len(path) > 1 and path[1] == ":": + path = file_scheme + "/" + path else: path = file_scheme + path return path @@ -216,8 +229,8 @@ def from_parse_result(cls, parse_result: ParseResult) -> "Url": @lru_cache(maxsize=None) def urlparse(url: str) -> Url: - if on_win and url.startswith('file:'): - url.replace('\\', '/') + if on_win and url.startswith("file:"): + url.replace("\\", "/") # Allows us to pass in strings like 'example.com:8080/path/1'. 
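# [editor's aside: illustrative sketch, not part of the patch] Both of these
# parse, the second via the "//" fallback below:
#
#     >>> urlparse("https://repo.anaconda.com/pkgs/main").scheme
#     'https'
#     >>> urlparse("example.com:8080/path/1").hostname
#     'example.com'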
if not has_scheme(url): url = "//" + url @@ -232,7 +245,7 @@ def url_to_s3_info(url): ('bucket-name.bucket', '/here/is/the/key') """ parsed_url = urlparse(url) - assert parsed_url.scheme == 's3', "You can only use s3: urls (not %r)" % url + assert parsed_url.scheme == "s3", "You can only use s3: urls (not %r)" % url bucket, key = parsed_url.hostname, parsed_url.path return bucket, key @@ -265,7 +278,7 @@ def is_ipv4_address(string_ip): socket.inet_aton(string_ip) except OSError: return False - return string_ip.count('.') == 3 + return string_ip.count(".") == 3 def is_ipv6_address(string_ip): @@ -297,15 +310,15 @@ def is_ip_address(string_ip): def join(*args): - start = '/' if not args[0] or args[0].startswith('/') else '' - return start + '/'.join(y for y in (x.strip('/') for x in args if x) if y) + start = "/" if not args[0] or args[0].startswith("/") else "" + return start + "/".join(y for y in (x.strip("/") for x in args if x) if y) join_url = join def has_scheme(value): - return re.match(r'[a-z][a-z0-9]{0,11}://', value) + return re.match(r"[a-z][a-z0-9]{0,11}://", value) def strip_scheme(url): @@ -316,7 +329,7 @@ def strip_scheme(url): >>> strip_scheme("s3://some.bucket/plus/a/path.ext") 'some.bucket/plus/a/path.ext' """ - return url.split('://', 1)[-1] + return url.split("://", 1)[-1] def mask_anaconda_token(url): @@ -340,10 +353,10 @@ def split_anaconda_token(url): >>> split_anaconda_token("https://10.2.3.4:8080/conda/t/tk-123-45") (u'https://10.2.3.4:8080/conda', u'tk-123-45') """ - _token_match = re.search(r'/t/([a-zA-Z0-9-]*)', url) + _token_match = re.search(r"/t/([a-zA-Z0-9-]*)", url) token = _token_match.groups()[0] if _token_match else None - cleaned_url = url.replace('/t/' + token, '', 1) if token is not None else url - return cleaned_url.rstrip('/'), token + cleaned_url = url.replace("/t/" + token, "", 1) if token is not None else url + return cleaned_url.rstrip("/"), token def split_platform(known_subdirs, url): @@ -357,13 +370,15 @@ def split_platform(known_subdirs, url): """ _platform_match = _split_platform_re(known_subdirs).search(url) platform = _platform_match.groups()[0] if _platform_match else None - cleaned_url = url.replace('/' + platform, '', 1) if platform is not None else url - return cleaned_url.rstrip('/'), platform + cleaned_url = url.replace("/" + platform, "", 1) if platform is not None else url + return cleaned_url.rstrip("/"), platform @lru_cache(maxsize=None) def _split_platform_re(known_subdirs): - _platform_match_regex = r'/(%s)(?:/|$)' % r'|'.join(r'%s' % d for d in known_subdirs) + _platform_match_regex = r"/(%s)(?:/|$)" % r"|".join( + r"%s" % d for d in known_subdirs + ) return re.compile(_platform_match_regex, re.IGNORECASE) @@ -371,7 +386,7 @@ def has_platform(url, known_subdirs): url_no_package_name, _ = split_filename(url) if not url_no_package_name: return None - maybe_a_platform = url_no_package_name.rsplit('/', 1)[-1] + maybe_a_platform = url_no_package_name.rsplit("/", 1)[-1] return maybe_a_platform in known_subdirs and maybe_a_platform or None @@ -403,7 +418,9 @@ def split_conda_url_easy_parts(known_subdirs, url): cleaned_url, platform = split_platform(known_subdirs, cleaned_url) _, ext = strip_pkg_extension(cleaned_url) cleaned_url, package_filename = ( - cleaned_url.rsplit("/", 1) if ext and "/" in cleaned_url else (cleaned_url, None) + cleaned_url.rsplit("/", 1) + if ext and "/" in cleaned_url + else (cleaned_url, None) ) # TODO: split out namespace using regex @@ -508,4 +525,5 @@ def escape_channel_url(channel): if __name__ == 
"__main__": import doctest + doctest.testmod() diff --git a/conda/core/envs_manager.py b/conda/core/envs_manager.py index 61dea53ca62..dcfb950a109 100644 --- a/conda/core/envs_manager.py +++ b/conda/core/envs_manager.py @@ -1,25 +1,24 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from errno import EACCES, EROFS, ENOENT -from logging import getLogger import os +from errno import EACCES, ENOENT, EROFS +from logging import getLogger from os.path import dirname, isdir, isfile, join, normpath -from .prefix_data import PrefixData from ..base.context import context -from ..common.compat import ensure_text_type, on_win, open from ..common._os import is_admin +from ..common.compat import ensure_text_type, on_win, open from ..common.path import expand from ..gateways.disk.read import yield_lines from ..gateways.disk.test import is_conda_environment +from .prefix_data import PrefixData log = getLogger(__name__) # The idea is to mock this to return '/dev/null' (or some temp file) instead. -def get_user_environments_txt_file(userhome='~'): - return expand(join(userhome, '.conda', 'environments.txt')) +def get_user_environments_txt_file(userhome="~"): + return expand(join(userhome, ".conda", "environments.txt")) def register_env(location): @@ -31,8 +30,11 @@ def register_env(location): except: pass - if ("placehold_pl" in location or "skeleton_" in location - or user_environments_txt_file == os.devnull): + if ( + "placehold_pl" in location + or "skeleton_" in location + or user_environments_txt_file == os.devnull + ): # Don't record envs created by conda-build. return @@ -41,21 +43,25 @@ def register_env(location): return try: - with open(user_environments_txt_file, 'a') as fh: + with open(user_environments_txt_file, "a") as fh: fh.write(ensure_text_type(location)) - fh.write('\n') + fh.write("\n") except OSError as e: if e.errno in (EACCES, EROFS, ENOENT): - log.warn("Unable to register environment. Path not writable or missing.\n" - " environment location: %s\n" - " registry file: %s", location, user_environments_txt_file) + log.warn( + "Unable to register environment. 
Path not writable or missing.\n" + " environment location: %s\n" + " registry file: %s", + location, + user_environments_txt_file, + ) else: raise def unregister_env(location): if isdir(location): - meta_dir = join(location, 'conda-meta') + meta_dir = join(location, "conda-meta") if isdir(meta_dir): meta_dir_contents = tuple(entry.name for entry in os.scandir(meta_dir)) if len(meta_dir_contents) > 1: @@ -71,13 +77,16 @@ def list_all_known_prefixes(): # If the user is an admin, load environments from all user home directories if is_admin(): if on_win: - home_dir_dir = dirname(expand('~')) + home_dir_dir = dirname(expand("~")) search_dirs = tuple(entry.path for entry in os.scandir(home_dir_dir)) else: from pwd import getpwall - search_dirs = tuple(pwentry.pw_dir for pwentry in getpwall()) or (expand('~'),) + + search_dirs = tuple(pwentry.pw_dir for pwentry in getpwall()) or ( + expand("~"), + ) else: - search_dirs = (expand('~'),) + search_dirs = (expand("~"),) for home_dir in filter(None, search_dirs): environments_txt_file = get_user_environments_txt_file(home_dir) if isfile(environments_txt_file): @@ -91,9 +100,13 @@ def list_all_known_prefixes(): # in case environments.txt files aren't complete, also add all known conda environments in # all envs_dirs envs_dirs = (envs_dir for envs_dir in context.envs_dirs if isdir(envs_dir)) - all_env_paths.update(path for path in ( - entry.path for envs_dir in envs_dirs for entry in os.scandir(envs_dir) - ) if path not in all_env_paths and is_conda_environment(path)) + all_env_paths.update( + path + for path in ( + entry.path for envs_dir in envs_dirs for entry in os.scandir(envs_dir) + ) + if path not in all_env_paths and is_conda_environment(path) + ) all_env_paths.add(context.root_prefix) return sorted(all_env_paths) @@ -114,7 +127,8 @@ def _clean_environments_txt(environments_txt_file, remove_location=None): remove_location = normpath(remove_location) environments_txt_lines = tuple(yield_lines(environments_txt_file)) environments_txt_lines_cleaned = tuple( - prefix for prefix in environments_txt_lines + prefix + for prefix in environments_txt_lines if prefix != remove_location and is_conda_environment(prefix) ) if environments_txt_lines_cleaned != environments_txt_lines: @@ -124,9 +138,9 @@ def _clean_environments_txt(environments_txt_file, remove_location=None): def _rewrite_environments_txt(environments_txt_file, prefixes): try: - with open(environments_txt_file, 'w') as fh: - fh.write('\n'.join(prefixes)) - fh.write('\n') + with open(environments_txt_file, "w") as fh: + fh.write("\n".join(prefixes)) + fh.write("\n") except OSError as e: log.info("File not cleaned: %s", environments_txt_file) - log.debug('%r', e, exc_info=True) + log.debug("%r", e, exc_info=True) diff --git a/conda/core/index.py b/conda/core/index.py index ece07911c66..609f84cced2 100644 --- a/conda/core/index.py +++ b/conda/core/index.py @@ -1,28 +1,27 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from itertools import chain -from logging import getLogger import platform import sys +from itertools import chain +from logging import getLogger try: from boltons.setutils import IndexedSet except ImportError: # pragma: no cover from .._vendor.boltons.setutils import IndexedSet -from ..deprecations import deprecated -from .package_cache_data import PackageCacheData -from .prefix_data import PrefixData -from .subdir_data import SubdirData, make_feature_record from ..base.context import context from ..common.io import ThreadLimitedThreadPoolExecutor, 
time_recorder +from ..deprecations import deprecated from ..exceptions import ChannelNotAllowed, InvalidSpec, PluginError from ..gateways.logging import initialize_logging from ..models.channel import Channel, all_channel_urls from ..models.enums import PackageType from ..models.match_spec import MatchSpec from ..models.records import EMPTY_LINK, PackageCacheRecord, PackageRecord, PrefixRecord +from .package_cache_data import PackageCacheData +from .prefix_data import PrefixData +from .subdir_data import SubdirData, make_feature_record log = getLogger(__name__) @@ -35,7 +34,9 @@ def check_whitelist(channel_urls): def check_allowlist(channel_urls): if context.allowlist_channels: allowlist_channel_urls = tuple( - chain.from_iterable(Channel(c).base_urls for c in context.allowlist_channels) + chain.from_iterable( + Channel(c).base_urls for c in context.allowlist_channels + ) ) for url in channel_urls: these_urls = Channel(url).base_urls @@ -45,10 +46,18 @@ def check_allowlist(channel_urls): LAST_CHANNEL_URLS = [] + @time_recorder("get_index") -def get_index(channel_urls=(), prepend=True, platform=None, - use_local=False, use_cache=False, unknown=None, prefix=None, - repodata_fn=context.repodata_fns[-1]): +def get_index( + channel_urls=(), + prepend=True, + platform=None, + use_local=False, + use_cache=False, + unknown=None, + prefix=None, + repodata_fn=context.repodata_fns[-1], +): """ Return the index of packages available on the channels @@ -78,11 +87,15 @@ def get_index(channel_urls=(), prepend=True, platform=None, return index -def fetch_index(channel_urls, use_cache=False, index=None, repodata_fn=context.repodata_fns[-1]): - log.debug('channel_urls=' + repr(channel_urls)) +def fetch_index( + channel_urls, use_cache=False, index=None, repodata_fn=context.repodata_fns[-1] +): + log.debug("channel_urls=" + repr(channel_urls)) index = {} with ThreadLimitedThreadPoolExecutor() as executor: - subdir_instantiator = lambda url: SubdirData(Channel(url), repodata_fn=repodata_fn) + subdir_instantiator = lambda url: SubdirData( + Channel(url), repodata_fn=repodata_fn + ) for f in executor.map(subdir_instantiator, channel_urls): index.update((rec, rec) for rec in f.iter_records()) return index @@ -103,7 +116,7 @@ def _supplement_index_with_prefix(index, prefix): # The downloaded repodata takes priority, so we do not overwrite. # We do, however, copy the link information so that the solver (i.e. resolve) # knows this package is installed. 
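# [editor's aside: illustrative sketch, not part of the patch] Typical call
# pattern for the get_index() API reformatted above; the channel and platform
# values here are hypothetical:
#
#     from conda.core.index import get_index
#     index = get_index(channel_urls=("defaults",), platform="linux-64")
#     # maps each PackageRecord to itself for everything visible on the
#     # requested channels, plus installed/cached entries when a prefix is
#     # supplied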
- link = prefix_record.get('link') or EMPTY_LINK + link = prefix_record.get("link") or EMPTY_LINK index[prefix_record] = PrefixRecord.from_objects( current_record, prefix_record, link=link ) @@ -142,15 +155,15 @@ def _supplement_index_with_cache(index): def _make_virtual_package(name, version=None, build_string=None): return PackageRecord( - package_type=PackageType.VIRTUAL_SYSTEM, - name=name, - version=version or '0', - build_string=build_string or '0', - channel='@', - subdir=context.subdir, - md5="12345678901234567890123456789012", - build_number=0, - fn=name, + package_type=PackageType.VIRTUAL_SYSTEM, + name=name, + version=version or "0", + build_string=build_string or "0", + channel="@", + subdir=context.subdir, + md5="12345678901234567890123456789012", + build_number=0, + fn=name, ) @@ -186,7 +199,7 @@ def _supplement_index_with_system(index): def get_archspec_name(): - from conda.base.context import non_x86_machines, _arch_names, _platform_map + from conda.base.context import _arch_names, _platform_map, non_x86_machines target_plat, target_arch = context.subdir.split("-") # This has to reverse what Context.subdir is doing @@ -200,25 +213,28 @@ def get_archspec_name(): return None # This has to match what Context.platform is doing - native_plat = _platform_map.get(sys.platform, 'unknown') + native_plat = _platform_map.get(sys.platform, "unknown") if native_plat != target_plat or platform.machine() != machine: return machine try: import archspec.cpu + return str(archspec.cpu.host()) except ImportError: return machine -def calculate_channel_urls(channel_urls=(), prepend=True, platform=None, use_local=False): +def calculate_channel_urls( + channel_urls=(), prepend=True, platform=None, use_local=False +): if use_local: - channel_urls = ['local'] + list(channel_urls) + channel_urls = ["local"] + list(channel_urls) if prepend: channel_urls += context.channels - subdirs = (platform, 'noarch') if platform is not None else context.subdirs + subdirs = (platform, "noarch") if platform is not None else context.subdirs return all_channel_urls(channel_urls, subdirs=subdirs) @@ -230,10 +246,10 @@ def get_reduced_index(prefix, channels, subdirs, specs, repodata_fn): pending_track_features = set() def push_spec(spec): - name = spec.get_raw_value('name') + name = spec.get_raw_value("name") if name and name not in collected_names: pending_names.add(name) - track_features = spec.get_raw_value('track_features') + track_features = spec.get_raw_value("track_features") if track_features: for ftr_name in track_features: if ftr_name not in collected_track_features: @@ -243,8 +259,11 @@ def push_record(record): try: combined_depends = record.combined_depends except InvalidSpec as e: - log.warning("Skipping %s due to InvalidSpec: %s", - record.record_id(), e._kwargs["invalid_spec"]) + log.warning( + "Skipping %s due to InvalidSpec: %s", + record.record_id(), + e._kwargs["invalid_spec"], + ) return push_spec(MatchSpec(record.name)) for _spec in combined_depends: @@ -264,8 +283,9 @@ def push_record(record): name = pending_names.pop() collected_names.add(name) spec = MatchSpec(name) - new_records = SubdirData.query_all(spec, channels=channels, subdirs=subdirs, - repodata_fn=repodata_fn) + new_records = SubdirData.query_all( + spec, channels=channels, subdirs=subdirs, repodata_fn=repodata_fn + ) for record in new_records: push_record(record) records.update(new_records) @@ -274,8 +294,9 @@ def push_record(record): feature_name = pending_track_features.pop() collected_track_features.add(feature_name) spec = 
MatchSpec(track_features=feature_name) - new_records = SubdirData.query_all(spec, channels=channels, subdirs=subdirs, - repodata_fn=repodata_fn) + new_records = SubdirData.query_all( + spec, channels=channels, subdirs=subdirs, repodata_fn=repodata_fn + ) for record in new_records: push_record(record) records.update(new_records) @@ -285,8 +306,9 @@ def push_record(record): if prefix is not None: _supplement_index_with_prefix(reduced_index, prefix) - if context.offline or ('unknown' in context._argparse_args - and context._argparse_args.unknown): + if context.offline or ( + "unknown" in context._argparse_args and context._argparse_args.unknown + ): # This is really messed up right now. Dates all the way back to # https://github.com/conda/conda/commit/f761f65a82b739562a0d997a2570e2b8a0bdc783 # TODO: revisit this later diff --git a/conda/core/initialize.py b/conda/core/initialize.py index 461fa01b45e..97e1659fb0e 100644 --- a/conda/core/initialize.py +++ b/conda/core/initialize.py @@ -28,30 +28,47 @@ """ +import json +import os +import re +import struct +import sys from difflib import unified_diff from errno import ENOENT from glob import glob from itertools import chain -import json from logging import getLogger -import os from os.path import abspath, basename, dirname, exists, expanduser, isdir, isfile, join from pathlib import Path from random import randint -import re -import sys -import struct -from .. import CONDA_PACKAGE_ROOT, CondaError, __version__ as CONDA_VERSION +from .. import CONDA_PACKAGE_ROOT, CondaError +from .. import __version__ as CONDA_VERSION +from ..activate import ( + CshActivator, + FishActivator, + PosixActivator, + PowerShellActivator, + XonshActivator, +) from ..auxlib.compat import Utf8NamedTemporaryFile from ..auxlib.ish import dals -from ..activate import (CshActivator, FishActivator, - PosixActivator, XonshActivator, PowerShellActivator) from ..base.context import context -from ..common.compat import (ensure_binary, ensure_utf8_encoding, - ensure_text_type, on_mac, on_win, open) -from ..common.path import (expand, get_bin_directory_short_path, get_python_short_path, - get_python_site_packages_short_path, win_path_ok) +from ..common.compat import ( + ensure_binary, + ensure_text_type, + ensure_utf8_encoding, + on_mac, + on_win, + open, +) +from ..common.path import ( + expand, + get_bin_directory_short_path, + get_python_short_path, + get_python_site_packages_short_path, + win_path_ok, +) from ..exceptions import CondaValueError from ..gateways.disk.create import copy, mkdir_p from ..gateways.disk.delete import rm_rf @@ -63,7 +80,8 @@ if on_win: # pragma: no cover import winreg - from menuinst.knownfolders import get_folder_path, FOLDERID + + from menuinst.knownfolders import FOLDERID, get_folder_path from menuinst.winshortcut import create_shortcut @@ -72,12 +90,13 @@ CONDA_INITIALIZE_RE_BLOCK = ( r"^# >>> conda initialize >>>(?:\n|\r\n)" r"([\s\S]*?)" - r"# <<< conda initialize <<<(?:\n|\r\n)?") + r"# <<< conda initialize <<<(?:\n|\r\n)?" +) CONDA_INITIALIZE_PS_RE_BLOCK = ( - r"^#region conda initialize(?:\n|\r\n)" - r"([\s\S]*?)" - r"#endregion(?:\n|\r\n)?") + r"^#region conda initialize(?:\n|\r\n)" r"([\s\S]*?)" r"#endregion(?:\n|\r\n)?" 
+) + class Result: NEEDS_SUDO = "needs sudo" @@ -89,25 +108,29 @@ class Result: # top-level functions # ##################################################### + def install(conda_prefix): plan = make_install_plan(conda_prefix) run_plan(plan) if not context.dry_run: - assert not any(step['result'] == Result.NEEDS_SUDO for step in plan) + assert not any(step["result"] == Result.NEEDS_SUDO for step in plan) print_plan_results(plan) return 0 -def initialize(conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=False): +def initialize( + conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=False +): plan1 = [] - if os.getenv('CONDA_PIP_UNINITIALIZED') == 'true': + if os.getenv("CONDA_PIP_UNINITIALIZED") == "true": plan1 = make_install_plan(conda_prefix) run_plan(plan1) if not context.dry_run: run_plan_elevated(plan1) - plan2 = make_initialize_plan(conda_prefix, shells, for_user, for_system, - anaconda_prompt, reverse=reverse) + plan2 = make_initialize_plan( + conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=reverse + ) run_plan(plan2) if not context.dry_run: run_plan_elevated(plan2) @@ -115,7 +138,7 @@ def initialize(conda_prefix, shells, for_user, for_system, anaconda_prompt, reve plan = plan1 + plan2 print_plan_results(plan) - if any(step['result'] == Result.NEEDS_SUDO for step in plan): + if any(step["result"] == Result.NEEDS_SUDO for step in plan): print("Operation failed.", file=sys.stderr) return 1 @@ -129,44 +152,56 @@ def initialize_dev(shell, dev_env_prefix=None, conda_source_root=None): python_exe, python_version, site_packages_dir = _get_python_info(prefix) - if not isfile(join(conda_source_root, 'conda', '__main__.py')): - raise CondaValueError("Directory is not a conda source root: %s" % conda_source_root) + if not isfile(join(conda_source_root, "conda", "__main__.py")): + raise CondaValueError( + "Directory is not a conda source root: %s" % conda_source_root + ) plan = make_install_plan(prefix) - plan.append({ - 'function': remove_conda_in_sp_dir.__name__, - 'kwargs': { - 'target_path': site_packages_dir, - }, - }) - plan.append({ - 'function': make_conda_egg_link.__name__, - 'kwargs': { - 'target_path': join(site_packages_dir, 'conda.egg-link'), - 'conda_source_root': conda_source_root, - }, - }) - plan.append({ - 'function': modify_easy_install_pth.__name__, - 'kwargs': { - 'target_path': join(site_packages_dir, 'easy-install.pth'), - 'conda_source_root': conda_source_root, - }, - }) - plan.append({ - 'function': make_dev_egg_info_file.__name__, - 'kwargs': { - 'target_path': join(conda_source_root, 'conda.egg-info'), - }, - }) + plan.append( + { + "function": remove_conda_in_sp_dir.__name__, + "kwargs": { + "target_path": site_packages_dir, + }, + } + ) + plan.append( + { + "function": make_conda_egg_link.__name__, + "kwargs": { + "target_path": join(site_packages_dir, "conda.egg-link"), + "conda_source_root": conda_source_root, + }, + } + ) + plan.append( + { + "function": modify_easy_install_pth.__name__, + "kwargs": { + "target_path": join(site_packages_dir, "easy-install.pth"), + "conda_source_root": conda_source_root, + }, + } + ) + plan.append( + { + "function": make_dev_egg_info_file.__name__, + "kwargs": { + "target_path": join(conda_source_root, "conda.egg-info"), + }, + } + ) run_plan(plan) if context.dry_run or context.verbosity: print_plan_results(plan, sys.stderr) - if any(step['result'] == Result.NEEDS_SUDO for step in plan): # pragma: no cover - raise CondaError("Operation failed. 
Privileged install disallowed for 'conda init --dev'.") + if any(step["result"] == Result.NEEDS_SUDO for step in plan): # pragma: no cover + raise CondaError( + "Operation failed. Privileged install disallowed for 'conda init --dev'." + ) env_vars = { "PYTHONHASHSEED": randint(0, 4294967296), @@ -174,16 +209,16 @@ def initialize_dev(shell, dev_env_prefix=None, conda_source_root=None): "TEST_PLATFORM": "win" if on_win else "unix", } unset_env_vars = ( - 'CONDA_DEFAULT_ENV', - 'CONDA_EXE', - '_CE_M', - '_CE_CONDA', - 'CONDA_PREFIX', - 'CONDA_PREFIX_1', - 'CONDA_PREFIX_2', - 'CONDA_PYTHON_EXE', - 'CONDA_PROMPT_MODIFIER', - 'CONDA_SHLVL', + "CONDA_DEFAULT_ENV", + "CONDA_EXE", + "_CE_M", + "_CE_CONDA", + "CONDA_PREFIX", + "CONDA_PREFIX_1", + "CONDA_PREFIX_2", + "CONDA_PYTHON_EXE", + "CONDA_PROMPT_MODIFIER", + "CONDA_SHLVL", ) if shell == "bash": @@ -191,7 +226,7 @@ def initialize_dev(shell, dev_env_prefix=None, conda_source_root=None): elif shell == "cmd.exe": script = _initialize_dev_cmdexe(prefix, env_vars, unset_env_vars) if not context.dry_run: - with open('dev-init.bat', 'w') as fh: + with open("dev-init.bat", "w") as fh: fh.write("\n".join(script)) if context.verbosity: print("\n".join(script)) @@ -208,7 +243,9 @@ def _initialize_dev_bash(prefix, env_vars, unset_env_vars): # unset/set environment variables yield from (f"unset {envvar}" for envvar in unset_env_vars) - yield from (f"export {envvar}='{value}'" for envvar, value in sorted(env_vars.items())) + yield from ( + f"export {envvar}='{value}'" for envvar, value in sorted(env_vars.items()) + ) # initialize shell interface yield f'eval "$("{sys_executable}" -m conda shell.bash hook)"' @@ -231,7 +268,9 @@ def _initialize_dev_cmdexe(prefix, env_vars, unset_env_vars): # unset/set environment variables yield from (f"@SET {envvar}=" for envvar in unset_env_vars) - yield from (f'@SET "{envvar}={value}"' for envvar, value in sorted(env_vars.items())) + yield from ( + f'@SET "{envvar}={value}"' for envvar, value in sorted(env_vars.items()) + ) # initialize shell interface yield f'@CALL "{condabin / "conda_hook.bat"}" {dev_arg}' @@ -247,6 +286,7 @@ def _initialize_dev_cmdexe(prefix, env_vars, unset_env_vars): # plan creators # ##################################################### + def make_install_plan(conda_prefix): try: python_exe, python_version, site_packages_dir = _get_python_info(conda_prefix) @@ -259,189 +299,249 @@ def make_install_plan(conda_prefix): # executables # ###################################### if on_win: - conda_exe_path = join(conda_prefix, 'Scripts', 'conda-script.py') - conda_env_exe_path = join(conda_prefix, 'Scripts', 'conda-env-script.py') - plan.append({ - 'function': make_entry_point_exe.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Scripts', 'conda.exe'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': make_entry_point_exe.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Scripts', 'conda-env.exe'), - 'conda_prefix': conda_prefix, - }, - }) + conda_exe_path = join(conda_prefix, "Scripts", "conda-script.py") + conda_env_exe_path = join(conda_prefix, "Scripts", "conda-env-script.py") + plan.append( + { + "function": make_entry_point_exe.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Scripts", "conda.exe"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": make_entry_point_exe.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Scripts", "conda-env.exe"), + "conda_prefix": conda_prefix, + }, + } + ) else: # We can't 
put a conda.exe in condabin on Windows. It'll conflict with conda.bat. - plan.append({ - 'function': make_entry_point.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda'), - 'conda_prefix': conda_prefix, - 'module': 'conda.cli', - 'func': 'main', + plan.append( + { + "function": make_entry_point.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "conda"), + "conda_prefix": conda_prefix, + "module": "conda.cli", + "func": "main", + }, + } + ) + conda_exe_path = join(conda_prefix, "bin", "conda") + conda_env_exe_path = join(conda_prefix, "bin", "conda-env") + + plan.append( + { + "function": make_entry_point.__name__, + "kwargs": { + "target_path": conda_exe_path, + "conda_prefix": conda_prefix, + "module": "conda.cli", + "func": "main", }, - }) - conda_exe_path = join(conda_prefix, 'bin', 'conda') - conda_env_exe_path = join(conda_prefix, 'bin', 'conda-env') - - plan.append({ - 'function': make_entry_point.__name__, - 'kwargs': { - 'target_path': conda_exe_path, - 'conda_prefix': conda_prefix, - 'module': 'conda.cli', - 'func': 'main', - }, - }) - plan.append({ - 'function': make_entry_point.__name__, - 'kwargs': { - 'target_path': conda_env_exe_path, - 'conda_prefix': conda_prefix, - 'module': 'conda_env.cli.main', - 'func': 'main', - }, - }) + } + ) + plan.append( + { + "function": make_entry_point.__name__, + "kwargs": { + "target_path": conda_env_exe_path, + "conda_prefix": conda_prefix, + "module": "conda_env.cli.main", + "func": "main", + }, + } + ) # ###################################### # shell wrappers # ###################################### if on_win: - plan.append({ - 'function': install_condabin_conda_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda.bat'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_library_bin_conda_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Library', 'bin', 'conda.bat'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_condabin_conda_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', '_conda_activate.bat'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_condabin_rename_tmp_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'rename_tmp.bat'), - 'conda_prefix': conda_prefix, + plan.append( + { + "function": install_condabin_conda_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "conda.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_library_bin_conda_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Library", "bin", "conda.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_conda_activate_bat.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "condabin", "_conda_activate.bat" + ), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_rename_tmp_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "rename_tmp.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_conda_auto_activate_bat.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "condabin", "conda_auto_activate.bat" + ), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_condabin_hook_bat.__name__, + "kwargs": { + "target_path": 
join(conda_prefix, "condabin", "conda_hook.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_Scripts_activate_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "Scripts", "activate.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_activate_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "activate.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + plan.append( + { + "function": install_deactivate_bat.__name__, + "kwargs": { + "target_path": join(conda_prefix, "condabin", "deactivate.bat"), + "conda_prefix": conda_prefix, + }, + } + ) + + plan.append( + { + "function": install_activate.__name__, + "kwargs": { + "target_path": join( + conda_prefix, get_bin_directory_short_path(), "activate" + ), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_condabin_conda_auto_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda_auto_activate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_deactivate.__name__, + "kwargs": { + "target_path": join( + conda_prefix, get_bin_directory_short_path(), "deactivate" + ), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_condabin_hook_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'conda_hook.bat'), - 'conda_prefix': conda_prefix, + } + ) + + plan.append( + { + "function": install_conda_sh.__name__, + "kwargs": { + "target_path": join(conda_prefix, "etc", "profile.d", "conda.sh"), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_Scripts_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'Scripts', 'activate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_conda_fish.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "etc", "fish", "conf.d", "conda.fish" + ), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_activate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'activate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_conda_psm1.__name__, + "kwargs": { + "target_path": join(conda_prefix, "shell", "condabin", "Conda.psm1"), + "conda_prefix": conda_prefix, }, - }) - plan.append({ - 'function': install_deactivate_bat.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'deactivate.bat'), - 'conda_prefix': conda_prefix, + } + ) + plan.append( + { + "function": install_conda_hook_ps1.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "shell", "condabin", "conda-hook.ps1" + ), + "conda_prefix": conda_prefix, }, - }) - - plan.append({ - 'function': install_activate.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, get_bin_directory_short_path(), 'activate'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_deactivate.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, get_bin_directory_short_path(), 'deactivate'), - 'conda_prefix': conda_prefix, - }, - }) - - plan.append({ - 'function': install_conda_sh.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'etc', 'profile.d', 'conda.sh'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_conda_fish.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'etc', 'fish', 'conf.d', 'conda.fish'), - 'conda_prefix': 
conda_prefix, - }, - }) - plan.append({ - 'function': install_conda_psm1.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'shell', 'condabin', 'Conda.psm1'), - 'conda_prefix': conda_prefix, - }, - }) - plan.append({ - 'function': install_conda_hook_ps1.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'shell', 'condabin', 'conda-hook.ps1'), - 'conda_prefix': conda_prefix, - }, - }) + } + ) if site_packages_dir: - plan.append({ - 'function': install_conda_xsh.__name__, - 'kwargs': { - 'target_path': join(site_packages_dir, 'xontrib', 'conda.xsh'), - 'conda_prefix': conda_prefix, - }, - }) + plan.append( + { + "function": install_conda_xsh.__name__, + "kwargs": { + "target_path": join(site_packages_dir, "xontrib", "conda.xsh"), + "conda_prefix": conda_prefix, + }, + } + ) else: - print("WARNING: Cannot install xonsh wrapper without a python interpreter in prefix: " - "%s" % conda_prefix, file=sys.stderr) - plan.append({ - 'function': install_conda_csh.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'etc', 'profile.d', 'conda.csh'), - 'conda_prefix': conda_prefix, - }, - }) + print( + "WARNING: Cannot install xonsh wrapper without a python interpreter in prefix: " + "%s" % conda_prefix, + file=sys.stderr, + ) + plan.append( + { + "function": install_conda_csh.__name__, + "kwargs": { + "target_path": join(conda_prefix, "etc", "profile.d", "conda.csh"), + "conda_prefix": conda_prefix, + }, + } + ) return plan -def make_initialize_plan(conda_prefix, shells, for_user, for_system, anaconda_prompt, - reverse=False): +def make_initialize_plan( + conda_prefix, shells, for_user, for_system, anaconda_prompt, reverse=False +): """ Creates a plan for initializing conda in shells. @@ -460,188 +560,222 @@ def make_initialize_plan(conda_prefix, shells, for_user, for_system, anaconda_pr """ plan = make_install_plan(conda_prefix) shells = set(shells) - if shells & {'bash', 'zsh'}: - if 'bash' in shells and for_user: - bashrc_path = expand(join('~', '.bash_profile' if (on_mac or on_win) else '.bashrc')) - plan.append({ - 'function': init_sh_user.__name__, - 'kwargs': { - 'target_path': bashrc_path, - 'conda_prefix': conda_prefix, - 'shell': 'bash', - 'reverse': reverse, - }, - }) + if shells & {"bash", "zsh"}: + if "bash" in shells and for_user: + bashrc_path = expand( + join("~", ".bash_profile" if (on_mac or on_win) else ".bashrc") + ) + plan.append( + { + "function": init_sh_user.__name__, + "kwargs": { + "target_path": bashrc_path, + "conda_prefix": conda_prefix, + "shell": "bash", + "reverse": reverse, + }, + } + ) - if 'zsh' in shells and for_user: - if 'ZDOTDIR' in os.environ: + if "zsh" in shells and for_user: + if "ZDOTDIR" in os.environ: zshrc_path = expand(join("$ZDOTDIR", ".zshrc")) else: - zshrc_path = expand(join('~', '.zshrc')) - plan.append({ - 'function': init_sh_user.__name__, - 'kwargs': { - 'target_path': zshrc_path, - 'conda_prefix': conda_prefix, - 'shell': 'zsh', - 'reverse': reverse, - }, - }) + zshrc_path = expand(join("~", ".zshrc")) + plan.append( + { + "function": init_sh_user.__name__, + "kwargs": { + "target_path": zshrc_path, + "conda_prefix": conda_prefix, + "shell": "zsh", + "reverse": reverse, + }, + } + ) if for_system: - plan.append({ - 'function': init_sh_system.__name__, - 'kwargs': { - 'target_path': '/etc/profile.d/conda.sh', - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": init_sh_system.__name__, + "kwargs": { + "target_path": "/etc/profile.d/conda.sh", + "conda_prefix": conda_prefix, + 
"reverse": reverse, + }, + } + ) - if 'fish' in shells: + if "fish" in shells: if for_user: - config_fish_path = expand(join('~', '.config', 'fish', 'config.fish')) - plan.append({ - 'function': init_fish_user.__name__, - 'kwargs': { - 'target_path': config_fish_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + config_fish_path = expand(join("~", ".config", "fish", "config.fish")) + plan.append( + { + "function": init_fish_user.__name__, + "kwargs": { + "target_path": config_fish_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if for_system: - config_fish_path = expand(join('~', '.config', 'fish', 'config.fish')) - plan.append({ - 'function': init_fish_user.__name__, - 'kwargs': { - 'target_path': config_fish_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + config_fish_path = expand(join("~", ".config", "fish", "config.fish")) + plan.append( + { + "function": init_fish_user.__name__, + "kwargs": { + "target_path": config_fish_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) - if 'xonsh' in shells: + if "xonsh" in shells: if for_user: - config_xonsh_path = expand(join('~', '.xonshrc')) - plan.append({ - 'function': init_xonsh_user.__name__, - 'kwargs': { - 'target_path': config_xonsh_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + config_xonsh_path = expand(join("~", ".xonshrc")) + plan.append( + { + "function": init_xonsh_user.__name__, + "kwargs": { + "target_path": config_xonsh_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if for_system: if on_win: - config_xonsh_path = expand(join('%ALLUSERSPROFILE%', 'xonsh', 'xonshrc')) + config_xonsh_path = expand( + join("%ALLUSERSPROFILE%", "xonsh", "xonshrc") + ) else: - config_xonsh_path = '/etc/xonshrc' - plan.append({ - 'function': init_xonsh_user.__name__, - 'kwargs': { - 'target_path': config_xonsh_path, - 'conda_prefix': conda_prefix, - 'reverse': reverse, + config_xonsh_path = "/etc/xonshrc" + plan.append( + { + "function": init_xonsh_user.__name__, + "kwargs": { + "target_path": config_xonsh_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) + + if "tcsh" in shells and for_user: + tcshrc_path = expand(join("~", ".tcshrc")) + plan.append( + { + "function": init_sh_user.__name__, + "kwargs": { + "target_path": tcshrc_path, + "conda_prefix": conda_prefix, + "shell": "tcsh", + "reverse": reverse, }, - }) - - if 'tcsh' in shells and for_user: - tcshrc_path = expand(join('~', '.tcshrc')) - plan.append({ - 'function': init_sh_user.__name__, - 'kwargs': { - 'target_path': tcshrc_path, - 'conda_prefix': conda_prefix, - 'shell': 'tcsh', - 'reverse': reverse, - }, - }) + } + ) - if 'powershell' in shells: + if "powershell" in shells: if for_user: - profile = '$PROFILE.CurrentUserAllHosts' + profile = "$PROFILE.CurrentUserAllHosts" if for_system: - profile = '$PROFILE.AllUsersAllHosts' + profile = "$PROFILE.AllUsersAllHosts" def find_powershell_paths(*exe_names): for exe_name in exe_names: try: yield subprocess_call( - (exe_name, '-NoProfile', '-Command', profile) + (exe_name, "-NoProfile", "-Command", profile) ).stdout.strip() except Exception: pass config_powershell_paths = set( - find_powershell_paths('powershell', 'pwsh', 'pwsh-preview') + find_powershell_paths("powershell", "pwsh", "pwsh-preview") ) for config_path in config_powershell_paths: if config_path is not None: - plan.append({ - 'function': init_powershell_user.__name__, - 'kwargs': { - 'target_path': config_path, - 
'conda_prefix': conda_prefix, - 'reverse': reverse, + plan.append( + { + "function": init_powershell_user.__name__, + "kwargs": { + "target_path": config_path, + "conda_prefix": conda_prefix, + "reverse": reverse, + }, } - }) + ) - if 'cmd.exe' in shells: + if "cmd.exe" in shells: if for_user: - plan.append({ - 'function': init_cmd_exe_registry.__name__, - 'kwargs': { - 'target_path': 'HKEY_CURRENT_USER\\Software\\Microsoft\\' - 'Command Processor\\AutoRun', - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": init_cmd_exe_registry.__name__, + "kwargs": { + "target_path": "HKEY_CURRENT_USER\\Software\\Microsoft\\" + "Command Processor\\AutoRun", + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if for_system: - plan.append({ - 'function': init_cmd_exe_registry.__name__, - 'kwargs': { - 'target_path': 'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\' - 'Command Processor\\AutoRun', - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": init_cmd_exe_registry.__name__, + "kwargs": { + "target_path": "HKEY_LOCAL_MACHINE\\Software\\Microsoft\\" + "Command Processor\\AutoRun", + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) # it would be nice to enable this on a user-level basis, but unfortunately, it is # a system-level key only. - plan.append({ - 'function': init_long_path.__name__, - 'kwargs': { - 'target_path': 'HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\' - 'FileSystem\\LongPathsEnabled' + plan.append( + { + "function": init_long_path.__name__, + "kwargs": { + "target_path": "HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\" + "FileSystem\\LongPathsEnabled" + }, } - }) + ) if anaconda_prompt: - plan.append({ - 'function': install_anaconda_prompt.__name__, - 'kwargs': { - 'target_path': join(conda_prefix, 'condabin', 'Anaconda Prompt.lnk'), - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + plan.append( + { + "function": install_anaconda_prompt.__name__, + "kwargs": { + "target_path": join( + conda_prefix, "condabin", "Anaconda Prompt.lnk" + ), + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) if on_win: desktop_dir, exception = get_folder_path(FOLDERID.Desktop) assert not exception else: - desktop_dir = join(expanduser('~'), "Desktop") - plan.append({ - 'function': install_anaconda_prompt.__name__, - 'kwargs': { - 'target_path': join(desktop_dir, "Anaconda Prompt.lnk"), - 'conda_prefix': conda_prefix, - 'reverse': reverse, - }, - }) + desktop_dir = join(expanduser("~"), "Desktop") + plan.append( + { + "function": install_anaconda_prompt.__name__, + "kwargs": { + "target_path": join(desktop_dir, "Anaconda Prompt.lnk"), + "conda_prefix": conda_prefix, + "reverse": reverse, + }, + } + ) return plan @@ -650,17 +784,20 @@ def find_powershell_paths(*exe_names): # plan runners # ##################################################### + def run_plan(plan): for step in plan: - previous_result = step.get('result', None) + previous_result = step.get("result", None) if previous_result in (Result.MODIFIED, Result.NO_CHANGE): continue try: - result = globals()[step['function']](*step.get('args', ()), **step.get('kwargs', {})) + result = globals()[step["function"]]( + *step.get("args", ()), **step.get("kwargs", {}) + ) except OSError as e: - log.info("%s: %r", step['function'], e, exc_info=True) + log.info("%s: %r", step["function"], e, exc_info=True) result = Result.NEEDS_SUDO - step['result'] = result + step["result"] = result def 
run_plan_elevated(plan): @@ -681,21 +818,29 @@ def run_plan_elevated(plan): """ - if any(step['result'] == Result.NEEDS_SUDO for step in plan): + if any(step["result"] == Result.NEEDS_SUDO for step in plan): if on_win: from ..common._os.windows import run_as_admin + temp_path = None try: - with Utf8NamedTemporaryFile('w+', suffix='.json', delete=False) as tf: + with Utf8NamedTemporaryFile("w+", suffix=".json", delete=False) as tf: # the default mode is 'w+b', and universal new lines don't work in that mode - tf.write(json.dumps(plan, ensure_ascii=False, default=lambda x: x.__dict__)) + tf.write( + json.dumps( + plan, ensure_ascii=False, default=lambda x: x.__dict__ + ) + ) temp_path = tf.name python_exe = '"%s"' % abspath(sys.executable) - hinstance, error_code = run_as_admin((python_exe, '-m', 'conda.core.initialize', - '"%s"' % temp_path)) + hinstance, error_code = run_as_admin( + (python_exe, "-m", "conda.core.initialize", '"%s"' % temp_path) + ) if error_code is not None: - print("ERROR during elevated execution.\n rc: %s" % error_code, - file=sys.stderr) + print( + "ERROR during elevated execution.\n rc: %s" % error_code, + file=sys.stderr, + ) with open(temp_path) as fh: _plan = json.loads(ensure_text_type(fh.read())) @@ -707,10 +852,10 @@ def run_plan_elevated(plan): else: stdin = json.dumps(plan, ensure_ascii=False, default=lambda x: x.__dict__) result = subprocess_call( - 'sudo %s -m conda.core.initialize' % sys.executable, + "sudo %s -m conda.core.initialize" % sys.executable, env={}, path=os.getcwd(), - stdin=stdin + stdin=stdin, ) stderr = result.stderr.strip() if stderr: @@ -732,7 +877,7 @@ def run_plan_from_temp_file(temp_path): with open(temp_path) as fh: plan = json.loads(ensure_text_type(fh.read())) run_plan(plan) - with open(temp_path, 'w+b') as fh: + with open(temp_path, "w+b") as fh: fh.write(ensure_binary(json.dumps(plan, ensure_ascii=False))) @@ -740,12 +885,16 @@ def print_plan_results(plan, stream=None): if not stream: stream = sys.stdout for step in plan: - print("%-14s%s" % (step.get('result'), step['kwargs']['target_path']), file=stream) + print( + "%-14s%s" % (step.get("result"), step["kwargs"]["target_path"]), file=stream + ) - changed = any(step.get('result') == Result.MODIFIED for step in plan) + changed = any(step.get("result") == Result.MODIFIED for step in plan) if changed: - print("\n==> For changes to take effect, close and re-open your current shell. <==\n", - file=stream) + print( + "\n==> For changes to take effect, close and re-open your current shell. <==\n", + file=stream, + ) else: print("No action taken.", file=stream) @@ -754,6 +903,7 @@ def print_plan_results(plan, stream=None): # individual operations # ##################################################### + def make_entry_point(target_path, conda_prefix, module, func): # 'ep' in this function refers to 'entry point' # target_path: join(conda_prefix, 'bin', 'conda') @@ -770,38 +920,47 @@ def make_entry_point(target_path, conda_prefix, module, func): new_ep_content = "" else: python_path = join(conda_prefix, get_python_short_path()) - new_ep_content = generate_shebang_for_entry_point(python_path, with_usr_bin_env=True) + new_ep_content = generate_shebang_for_entry_point( + python_path, with_usr_bin_env=True + ) - conda_extra = dals(""" + conda_extra = dals( + """ # Before any more imports, leave cwd out of sys.path for internal 'conda shell.*' commands. 
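
An aside on the plan-runner idiom being reformatted here: every step is a plain dict naming a module-level function plus its kwargs, dispatched through globals() and memoized via a "result" key, so an elevated re-run (which round-trips the plan through a JSON temp file) skips steps that already finished. A minimal standalone sketch of the same mechanism, with a hypothetical step function rather than conda's real ones:

```python
from enum import Enum


class Result(str, Enum):
    NO_CHANGE = "no_change"
    MODIFIED = "modified"
    NEEDS_SUDO = "needs_sudo"


def write_stub(target_path):
    # stand-in for an install_* operation; real steps also return a Result
    print(f"would write {target_path}")
    return Result.MODIFIED


def run_plan(plan):
    for step in plan:
        if step.get("result") in (Result.MODIFIED, Result.NO_CHANGE):
            continue  # finished on a previous (possibly unelevated) pass
        func = globals()[step["function"]]  # dispatch by function name
        step["result"] = func(*step.get("args", ()), **step.get("kwargs", {}))


run_plan([{"function": write_stub.__name__, "kwargs": {"target_path": "/tmp/x"}}])
```
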
# see https://github.com/conda/conda/issues/6549 if len(sys.argv) > 1 and sys.argv[1].startswith('shell.') and sys.path and sys.path[0] == '': # The standard first entry in sys.path is an empty string, # and os.path.abspath('') expands to os.getcwd(). del sys.path[0] - """) + """ + ) - new_ep_content += dals(""" + new_ep_content += ( + dals( + """ # -*- coding: utf-8 -*- import sys %(extra)s if __name__ == '__main__': from %(module)s import %(func)s sys.exit(%(func)s()) - """) % { - 'extra': conda_extra if module == 'conda.cli' else '', - 'module': module, - 'func': func, - } + """ + ) + % { + "extra": conda_extra if module == "conda.cli" else "", + "module": module, + "func": func, + } + ) if new_ep_content != original_ep_content: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(original_ep_content, new_ep_content)) if not context.dry_run: mkdir_p(dirname(conda_ep_path)) - with open(conda_ep_path, 'w') as fdst: + with open(conda_ep_path, "w") as fdst: fdst.write(new_ep_content) if not on_win: make_executable(conda_ep_path) @@ -814,7 +973,7 @@ def make_entry_point_exe(target_path, conda_prefix): # target_path: join(conda_prefix, 'Scripts', 'conda.exe') exe_path = target_path bits = 8 * struct.calcsize("P") - source_exe_path = join(CONDA_PACKAGE_ROOT, 'shell', 'cli-%d.exe' % bits) + source_exe_path = join(CONDA_PACKAGE_ROOT, "shell", "cli-%d.exe" % bits) if isfile(exe_path): if compute_sum(exe_path, "md5") == compute_sum(source_exe_path, "md5"): return Result.NO_CHANGE @@ -831,7 +990,7 @@ def make_entry_point_exe(target_path, conda_prefix): def install_anaconda_prompt(target_path, conda_prefix, reverse): # target_path: join(conda_prefix, 'condabin', 'Anaconda Prompt.lnk') # target: join(os.environ["HOMEPATH"], "Desktop", "Anaconda Prompt.lnk") - icon_path = join(CONDA_PACKAGE_ROOT, 'shell', 'conda_icon.ico') + icon_path = join(CONDA_PACKAGE_ROOT, "shell", "conda_icon.ico") target = join(os.environ["HOMEPATH"], "Desktop", "Anaconda Prompt.lnk") args = ( @@ -849,10 +1008,10 @@ def install_anaconda_prompt(target_path, conda_prefix, reverse): create_shortcut( "%windir%\\System32\\cmd.exe", "Anconda Prompt", - '' + target_path, - ' '.join(args), - '' + expanduser('~'), - '' + icon_path, + "" + target_path, + " ".join(args), + "" + expanduser("~"), + "" + icon_path, ) result = Result.MODIFIED if reverse: @@ -873,12 +1032,12 @@ def _install_file(target_path, file_content): if new_content != original_content: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(original_content, new_content)) if not context.dry_run: mkdir_p(dirname(target_path)) - with open(target_path, 'w') as fdst: + with open(target_path, "w") as fdst: fdst.write(new_content) return Result.MODIFIED else: @@ -893,7 +1052,7 @@ def install_conda_sh(target_path, conda_prefix): def install_Scripts_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'Scripts', 'activate.bat') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'Scripts', 'activate.bat') + src_path = join(CONDA_PACKAGE_ROOT, "shell", "Scripts", "activate.bat") with open(src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -901,7 +1060,7 @@ def install_Scripts_activate_bat(target_path, conda_prefix): def install_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'activate.bat') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'activate.bat') + src_path = join(CONDA_PACKAGE_ROOT, "shell", 
"condabin", "activate.bat") with open(src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -909,7 +1068,7 @@ def install_activate_bat(target_path, conda_prefix): def install_deactivate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'deactivate.bat') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'deactivate.bat') + src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "deactivate.bat") with open(src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -917,11 +1076,8 @@ def install_deactivate_bat(target_path, conda_prefix): def install_activate(target_path, conda_prefix): # target_path: join(conda_prefix, get_bin_directory_short_path(), 'activate') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'bin', 'activate') - file_content = ( - "#!/bin/sh\n" - "_CONDA_ROOT=\"%s\"\n" - ) % conda_prefix + src_path = join(CONDA_PACKAGE_ROOT, "shell", "bin", "activate") + file_content = ("#!/bin/sh\n" '_CONDA_ROOT="%s"\n') % conda_prefix with open(src_path) as fsrc: file_content += fsrc.read() return _install_file(target_path, file_content) @@ -929,11 +1085,8 @@ def install_activate(target_path, conda_prefix): def install_deactivate(target_path, conda_prefix): # target_path: join(conda_prefix, get_bin_directory_short_path(), 'deactivate') - src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'bin', 'deactivate') - file_content = ( - "#!/bin/sh\n" - "_CONDA_ROOT=\"%s\"\n" - ) % conda_prefix + src_path = join(CONDA_PACKAGE_ROOT, "shell", "bin", "deactivate") + file_content = ("#!/bin/sh\n" '_CONDA_ROOT="%s"\n') % conda_prefix with open(src_path) as fsrc: file_content += fsrc.read() return _install_file(target_path, file_content) @@ -941,7 +1094,7 @@ def install_deactivate(target_path, conda_prefix): def install_condabin_conda_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'conda.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda.bat') + conda_bat_src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda.bat") with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -949,7 +1102,9 @@ def install_condabin_conda_bat(target_path, conda_prefix): def install_library_bin_conda_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'Library', 'bin', 'conda.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'Library', 'bin', 'conda.bat') + conda_bat_src_path = join( + CONDA_PACKAGE_ROOT, "shell", "Library", "bin", "conda.bat" + ) with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -957,7 +1112,9 @@ def install_library_bin_conda_bat(target_path, conda_prefix): def install_condabin_conda_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', '_conda_activate.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', '_conda_activate.bat') + conda_bat_src_path = join( + CONDA_PACKAGE_ROOT, "shell", "condabin", "_conda_activate.bat" + ) with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -965,7 +1122,7 @@ def install_condabin_conda_activate_bat(target_path, conda_prefix): def install_condabin_rename_tmp_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'rename_tmp.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 
'rename_tmp.bat') + conda_bat_src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "rename_tmp.bat") with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -973,7 +1130,9 @@ def install_condabin_rename_tmp_bat(target_path, conda_prefix): def install_condabin_conda_auto_activate_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'conda_auto_activate.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda_auto_activate.bat') + conda_bat_src_path = join( + CONDA_PACKAGE_ROOT, "shell", "condabin", "conda_auto_activate.bat" + ) with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -981,7 +1140,7 @@ def install_condabin_conda_auto_activate_bat(target_path, conda_prefix): def install_condabin_hook_bat(target_path, conda_prefix): # target_path: join(conda_prefix, 'condabin', 'conda_hook.bat') - conda_bat_src_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda_hook.bat') + conda_bat_src_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda_hook.bat") with open(conda_bat_src_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -992,9 +1151,10 @@ def install_conda_fish(target_path, conda_prefix): file_content = FishActivator().hook(auto_activate_base=False) return _install_file(target_path, file_content) + def install_conda_psm1(target_path, conda_prefix): # target_path: join(conda_prefix, 'shell', 'condabin', 'Conda.psm1') - conda_psm1_path = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'Conda.psm1') + conda_psm1_path = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "Conda.psm1") with open(conda_psm1_path) as fsrc: file_content = fsrc.read() return _install_file(target_path, file_content) @@ -1005,6 +1165,7 @@ def install_conda_hook_ps1(target_path, conda_prefix): file_content = PowerShellActivator().hook(auto_activate_base=False) return _install_file(target_path, file_content) + def install_conda_xsh(target_path, conda_prefix): # target_path: join(site_packages_dir, 'xonsh', 'conda.xsh') file_content = XonshActivator().hook(auto_activate_base=False) @@ -1020,19 +1181,25 @@ def install_conda_csh(target_path, conda_prefix): def _config_fish_content(conda_prefix): if on_win: from ..activate import native_path_to_unix - conda_exe = native_path_to_unix(join(conda_prefix, 'Scripts', 'conda.exe')) + + conda_exe = native_path_to_unix(join(conda_prefix, "Scripts", "conda.exe")) else: - conda_exe = join(conda_prefix, 'bin', 'conda') - conda_initialize_content = dals(""" + conda_exe = join(conda_prefix, "bin", "conda") + conda_initialize_content = ( + dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! 
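
All of the install_*_bat / install_conda_* helpers in this hunk follow one shape: read a template shipped under CONDA_PACKAGE_ROOT, then hand it to _install_file, which writes only when the on-disk content actually differs. A rough standalone sketch of that compare-then-write idiom (simplified: no dry-run handling or diff printing):

```python
import os


def install_file(target_path: str, new_content: str) -> str:
    """Write new_content to target_path only if it differs from what's there."""
    try:
        with open(target_path) as fh:
            original = fh.read()
    except FileNotFoundError:
        original = ""
    if new_content == original:
        return "NO_CHANGE"
    os.makedirs(os.path.dirname(target_path) or ".", exist_ok=True)
    with open(target_path, "w") as fh:
        fh.write(new_content)
    return "MODIFIED"
```
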
if test -f %(conda_exe)s eval %(conda_exe)s "shell.fish" "hook" $argv | source end # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, - } + """ + ) + % { + "conda_exe": conda_exe, + } + ) return conda_initialize_content @@ -1044,7 +1211,7 @@ def init_fish_user(target_path, conda_prefix, reverse): with open(user_rc_path) as fh: rc_content = fh.read() except FileNotFoundError: - rc_content = '' + rc_content = "" except: raise @@ -1066,7 +1233,7 @@ def init_fish_user(target_path, conda_prefix, reverse): r"^\s*" + CONDA_INITIALIZE_RE_BLOCK, "", rc_content, - flags=re.DOTALL | re.MULTILINE + flags=re.DOTALL | re.MULTILINE, ) else: if not on_win: @@ -1103,18 +1270,18 @@ def init_fish_user(target_path, conda_prefix, reverse): rc_content = rc_content.replace(replace_str, conda_initialize_content) if "# >>> conda initialize >>>" not in rc_content: - rc_content += '\n%s\n' % conda_initialize_content + rc_content += "\n%s\n" % conda_initialize_content if rc_content != rc_original_content: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(rc_original_content, rc_content)) if not context.dry_run: # Make the directory if needed. if not exists(dirname(user_rc_path)): mkdir_p(dirname(user_rc_path)) - with open(user_rc_path, 'w') as fh: + with open(user_rc_path, "w") as fh: fh.write(rc_content) return Result.MODIFIED else: @@ -1124,10 +1291,12 @@ def init_fish_user(target_path, conda_prefix, reverse): def _config_xonsh_content(conda_prefix): if on_win: from ..activate import native_path_to_unix - conda_exe = native_path_to_unix(join(conda_prefix, 'Scripts', 'conda.exe')) + + conda_exe = native_path_to_unix(join(conda_prefix, "Scripts", "conda.exe")) else: - conda_exe = join(conda_prefix, 'bin', 'conda') - conda_initialize_content = dals(""" + conda_exe = join(conda_prefix, "bin", "conda") + conda_initialize_content = dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! if !(test -f "{conda_exe}"): @@ -1141,7 +1310,8 @@ def _config_xonsh_content(conda_prefix): _sys.modules["xontrib.conda"] = _mod del _sys, _mod, _ModuleType # <<< conda initialize <<< - """).format(conda_exe=conda_exe) + """ + ).format(conda_exe=conda_exe) return conda_initialize_content @@ -1153,7 +1323,7 @@ def init_xonsh_user(target_path, conda_prefix, reverse): with open(user_rc_path) as fh: rc_content = fh.read() except FileNotFoundError: - rc_content = '' + rc_content = "" except: raise @@ -1175,7 +1345,7 @@ def init_xonsh_user(target_path, conda_prefix, reverse): r"^\s*" + CONDA_INITIALIZE_RE_BLOCK, "", rc_content, - flags=re.DOTALL | re.MULTILINE + flags=re.DOTALL | re.MULTILINE, ) else: replace_str = "__CONDA_REPLACE_ME_123__" @@ -1193,14 +1363,14 @@ def init_xonsh_user(target_path, conda_prefix, reverse): if rc_content != rc_original_content: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(rc_original_content, rc_content)) if not context.dry_run: # Make the directory if needed. 
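
The init_fish_user / init_xonsh_user functions around this point all edit rc files through the same sentinel comments, "# >>> conda initialize >>>" ... "# <<< conda initialize <<<": reverse mode strips the whole block with a single DOTALL regex, forward mode swaps it for freshly generated content. A compact sketch of that managed-block edit, using a hypothetical marker regex with the same flags as the code above:

```python
import re

BLOCK_RE = r"^# >>> conda initialize >>>.*?# <<< conda initialize <<<\n?"


def upsert_block(rc_content: str, new_block: str, reverse: bool = False) -> str:
    # DOTALL lets .*? span lines; MULTILINE anchors ^ at each line start
    rc_content = re.sub(BLOCK_RE, "", rc_content, flags=re.DOTALL | re.MULTILINE)
    if not reverse:
        rc_content += f"\n{new_block}\n"
    return rc_content
```
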
if not exists(dirname(user_rc_path)): mkdir_p(dirname(user_rc_path)) - with open(user_rc_path, 'w') as fh: + with open(user_rc_path, "w") as fh: fh.write(rc_content) return Result.MODIFIED else: @@ -1210,22 +1380,30 @@ def init_xonsh_user(target_path, conda_prefix, reverse): def _bashrc_content(conda_prefix, shell): if on_win: from ..activate import native_path_to_unix - conda_exe = native_path_to_unix(join(conda_prefix, 'Scripts', 'conda.exe')) - conda_initialize_content = dals(""" + + conda_exe = native_path_to_unix(join(conda_prefix, "Scripts", "conda.exe")) + conda_initialize_content = ( + dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! if [ -f '%(conda_exe)s' ]; then eval "$('%(conda_exe)s' 'shell.%(shell)s' 'hook')" fi # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, - 'shell': shell, - } + """ + ) + % { + "conda_exe": conda_exe, + "shell": shell, + } + ) else: - conda_exe = join(conda_prefix, 'bin', 'conda') + conda_exe = join(conda_prefix, "bin", "conda") if shell in ("csh", "tcsh"): - conda_initialize_content = dals(""" + conda_initialize_content = ( + dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! if ( -f "%(conda_prefix)s/etc/profile.d/conda.csh" ) then @@ -1234,14 +1412,19 @@ def _bashrc_content(conda_prefix, shell): setenv PATH "%(conda_bin)s:$PATH" endif # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, - 'shell': shell, - 'conda_bin': dirname(conda_exe), - 'conda_prefix': conda_prefix, - } + """ + ) + % { + "conda_exe": conda_exe, + "shell": shell, + "conda_bin": dirname(conda_exe), + "conda_prefix": conda_prefix, + } + ) else: - conda_initialize_content = dals(""" + conda_initialize_content = ( + dals( + """ # >>> conda initialize >>> # !! Contents within this block are managed by 'conda init' !! 
__conda_setup="$('%(conda_exe)s' 'shell.%(shell)s' 'hook' 2> /dev/null)" @@ -1256,12 +1439,15 @@ def _bashrc_content(conda_prefix, shell): fi unset __conda_setup # <<< conda initialize <<< - """) % { - 'conda_exe': conda_exe, - 'shell': shell, - 'conda_bin': dirname(conda_exe), - 'conda_prefix': conda_prefix, - } + """ + ) + % { + "conda_exe": conda_exe, + "shell": shell, + "conda_bin": dirname(conda_exe), + "conda_prefix": conda_prefix, + } + ) return conda_initialize_content @@ -1273,7 +1459,7 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): with open(user_rc_path) as fh: rc_content = fh.read() except FileNotFoundError: - rc_content = '' + rc_content = "" except: raise @@ -1296,7 +1482,7 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): r"^\s*" + CONDA_INITIALIZE_RE_BLOCK, "", rc_content, - flags=re.DOTALL | re.MULTILINE + flags=re.DOTALL | re.MULTILINE, ) else: if not on_win: @@ -1348,15 +1534,15 @@ def init_sh_user(target_path, conda_prefix, shell, reverse=False): rc_content = rc_content.replace(replace_str, conda_initialize_content) if "# >>> conda initialize >>>" not in rc_content: - rc_content += '\n%s\n' % conda_initialize_content + rc_content += "\n%s\n" % conda_initialize_content if rc_content != rc_original_content: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(rc_original_content, rc_content)) if not context.dry_run: - with open(user_rc_path, 'w') as fh: + with open(user_rc_path, "w") as fh: fh.write(rc_content) return Result.MODIFIED else: @@ -1377,17 +1563,17 @@ def init_sh_system(target_path, conda_prefix, reverse=False): os.remove(conda_sh_system_path) return Result.MODIFIED else: - conda_sh_contents = _bashrc_content(conda_prefix, 'posix') + conda_sh_contents = _bashrc_content(conda_prefix, "posix") if conda_sh_system_contents != conda_sh_contents: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(conda_sh_contents, conda_sh_system_contents)) if not context.dry_run: if lexists(conda_sh_system_path): rm_rf(conda_sh_system_path) mkdir_p(dirname(conda_sh_system_path)) - with open(conda_sh_system_path, 'w') as fh: + with open(conda_sh_system_path, "w") as fh: fh.write(conda_sh_contents) return Result.MODIFIED return Result.NO_CHANGE @@ -1397,8 +1583,8 @@ def _read_windows_registry(target_path): # pragma: no cover # HKEY_LOCAL_MACHINE\Software\Microsoft\Command Processor\AutoRun # HKEY_CURRENT_USER\Software\Microsoft\Command Processor\AutoRun # returns value_value, value_type -or- None, None if target does not exist - main_key, the_rest = target_path.split('\\', 1) - subkey_str, value_name = the_rest.rsplit('\\', 1) + main_key, the_rest = target_path.split("\\", 1) + subkey_str, value_name = the_rest.rsplit("\\", 1) main_key = getattr(winreg, main_key) try: @@ -1424,8 +1610,8 @@ def _read_windows_registry(target_path): # pragma: no cover def _write_windows_registry(target_path, value_value, value_type): # pragma: no cover - main_key, the_rest = target_path.split('\\', 1) - subkey_str, value_name = the_rest.rsplit('\\', 1) + main_key, the_rest = target_path.split("\\", 1) + subkey_str, value_name = the_rest.rsplit("\\", 1) main_key = getattr(winreg, main_key) try: key = winreg.OpenKey(main_key, subkey_str, 0, winreg.KEY_WRITE) @@ -1448,22 +1634,24 @@ def init_cmd_exe_registry(target_path, conda_prefix, reverse=False): prev_value = "" value_type = winreg.REG_EXPAND_SZ - old_hook_path = '"{}"'.format(join(conda_prefix, 'condabin', 'conda_hook.bat')) - new_hook = 'if 
exist {hp} {hp}'.format(hp=old_hook_path) + old_hook_path = '"{}"'.format(join(conda_prefix, "condabin", "conda_hook.bat")) + new_hook = "if exist {hp} {hp}".format(hp=old_hook_path) if reverse: # we can't just reset it to None and remove it, because there may be other contents here. # We need to strip out our part, and if there's nothing left, remove the key. # Break up string by parts joined with "&" - autorun_parts = prev_value.split('&') + autorun_parts = prev_value.split("&") autorun_parts = [part.strip() for part in autorun_parts if new_hook not in part] # We must remove the old hook path too if it is there - autorun_parts = [part.strip() for part in autorun_parts if old_hook_path not in part] + autorun_parts = [ + part.strip() for part in autorun_parts if old_hook_path not in part + ] new_value = " & ".join(autorun_parts) else: replace_str = "__CONDA_REPLACE_ME_123__" # Replace new (if exist checked) hook new_value = re.sub( - r'(if exist \"[^\"]*?conda[-_]hook\.bat\" \"[^\"]*?conda[-_]hook\.bat\")', + r"(if exist \"[^\"]*?conda[-_]hook\.bat\" \"[^\"]*?conda[-_]hook\.bat\")", replace_str, prev_value, count=1, @@ -1471,27 +1659,29 @@ def init_cmd_exe_registry(target_path, conda_prefix, reverse=False): ) # Replace old hook new_value = re.sub( - r'(\"[^\"]*?conda[-_]hook\.bat\")', + r"(\"[^\"]*?conda[-_]hook\.bat\")", replace_str, new_value, flags=re.IGNORECASE | re.UNICODE, ) # Fold repeats of 'HOOK & HOOK' - new_value_2 = new_value.replace(replace_str + ' & ' + replace_str, replace_str) + new_value_2 = new_value.replace(replace_str + " & " + replace_str, replace_str) while new_value_2 != new_value: new_value = new_value_2 - new_value_2 = new_value.replace(replace_str + ' & ' + replace_str, replace_str) + new_value_2 = new_value.replace( + replace_str + " & " + replace_str, replace_str + ) new_value = new_value_2.replace(replace_str, new_hook) if new_hook not in new_value: if new_value: - new_value += ' & ' + new_hook + new_value += " & " + new_hook else: new_value = new_hook if prev_value != new_value: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(prev_value, new_value)) if not context.dry_run: @@ -1502,15 +1692,15 @@ def init_cmd_exe_registry(target_path, conda_prefix, reverse=False): def init_long_path(target_path): - win_ver, _, win_rev = context.os_distribution_name_version[1].split('.') + win_ver, _, win_rev = context.os_distribution_name_version[1].split(".") # win10, build 14352 was the first preview release that supported this if int(win_ver) >= 10 and int(win_rev) >= 14352: prev_value, value_type = _read_windows_registry(target_path) if str(prev_value) != "1": if context.verbosity: - print('\n') + print("\n") print(target_path) - print(make_diff(str(prev_value), '1')) + print(make_diff(str(prev_value), "1")) if not context.dry_run: _write_windows_registry(target_path, 1, winreg.REG_DWORD) return Result.MODIFIED @@ -1518,28 +1708,36 @@ def init_long_path(target_path): return Result.NO_CHANGE else: if context.verbosity: - print('\n') - print('Not setting long path registry key; Windows version must be at least 10 with ' - 'the fall 2016 "Anniversary update" or newer.') + print("\n") + print( + "Not setting long path registry key; Windows version must be at least 10 with " + 'the fall 2016 "Anniversary update" or newer.' 
+ ) return Result.NO_CHANGE + def _powershell_profile_content(conda_prefix): if on_win: - conda_exe = join(conda_prefix, 'Scripts', 'conda.exe') + conda_exe = join(conda_prefix, "Scripts", "conda.exe") else: - conda_exe = join(conda_prefix, 'bin', 'conda') + conda_exe = join(conda_prefix, "bin", "conda") - conda_powershell_module = dals(""" + conda_powershell_module = dals( + """ #region conda initialize # !! Contents within this block are managed by 'conda init' !! If (Test-Path "{conda_exe}") {{ (& "{conda_exe}" "shell.powershell" "hook") | Out-String | ?{{$_}} | Invoke-Expression }} #endregion - """.format(conda_exe=conda_exe)) + """.format( + conda_exe=conda_exe + ) + ) return conda_powershell_module + def init_powershell_user(target_path, conda_prefix, reverse): # target_path: $PROFILE profile_path = target_path @@ -1557,12 +1755,13 @@ def init_powershell_user(target_path, conda_prefix, reverse): # TODO: comment out old ipmos and Import-Modules. if reverse: - profile_content = re.sub(CONDA_INITIALIZE_PS_RE_BLOCK, - "", - profile_content, - count=1, - flags=re.DOTALL | re.MULTILINE - ) + profile_content = re.sub( + CONDA_INITIALIZE_PS_RE_BLOCK, + "", + profile_content, + count=1, + flags=re.DOTALL | re.MULTILINE, + ) else: # Find what content we need to add. conda_initialize_content = _powershell_profile_content(conda_prefix) @@ -1570,24 +1769,24 @@ def init_powershell_user(target_path, conda_prefix, reverse): if "#region conda initialize" not in profile_content: profile_content += f"\n{conda_initialize_content}\n" else: - profile_content = re.sub(CONDA_INITIALIZE_PS_RE_BLOCK, - "__CONDA_REPLACE_ME_123__", - profile_content, - count=1, - flags=re.DOTALL | re.MULTILINE - ).replace("__CONDA_REPLACE_ME_123__", - conda_initialize_content) + profile_content = re.sub( + CONDA_INITIALIZE_PS_RE_BLOCK, + "__CONDA_REPLACE_ME_123__", + profile_content, + count=1, + flags=re.DOTALL | re.MULTILINE, + ).replace("__CONDA_REPLACE_ME_123__", conda_initialize_content) if profile_content != profile_original_content: if context.verbosity: - print('\n') + print("\n") print(target_path) print(make_diff(profile_original_content, profile_content)) if not context.dry_run: # Make the directory if needed. 
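
Both the AutoRun registry hunk above and this PowerShell profile hunk update content in two phases: re.sub the existing block down to a placeholder token, then str.replace the placeholder with the new text. One likely motivation (my reading, not stated in the code) is that str.replace never interprets backslashes or group references in the replacement, which matters for Windows paths. A tiny sketch of the trick, with the placeholder string borrowed from the hunks above:

```python
import re


def swap_block(text: str, block_re: str, new_block: str) -> str:
    placeholder = "__CONDA_REPLACE_ME_123__"
    # phase 1: mark the old block; count=1 leaves later occurrences alone
    text = re.sub(block_re, placeholder, text, count=1, flags=re.DOTALL | re.MULTILINE)
    # phase 2: literal substitution, so new_block needs no regex escaping
    return text.replace(placeholder, new_block)


# a raw Windows path like this would be an invalid escape in an re.sub replacement
print(swap_block("run OLD_HOOK now", r"OLD_HOOK", r"C:\condabin\conda_hook.bat"))
```
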
if not exists(dirname(profile_path)): mkdir_p(dirname(profile_path)) - with open(profile_path, 'w') as fp: + with open(profile_path, "w") as fp: fp.write(profile_content) return Result.MODIFIED else: @@ -1598,12 +1797,14 @@ def remove_conda_in_sp_dir(target_path): # target_path: site_packages_dir modified = False site_packages_dir = target_path - rm_rf_these = chain.from_iterable(( - glob(join(site_packages_dir, "conda-*info")), - glob(join(site_packages_dir, "conda.*")), - glob(join(site_packages_dir, "conda-*.egg")), - )) - rm_rf_these = (p for p in rm_rf_these if not p.endswith('conda.egg-link')) + rm_rf_these = chain.from_iterable( + ( + glob(join(site_packages_dir, "conda-*info")), + glob(join(site_packages_dir, "conda.*")), + glob(join(site_packages_dir, "conda-*.egg")), + ) + ) + rm_rf_these = (p for p in rm_rf_these if not p.endswith("conda.egg-link")) for fn in rm_rf_these: print("rm -rf %s" % join(site_packages_dir, fn), file=sys.stderr) if not context.dry_run: @@ -1631,18 +1832,21 @@ def make_conda_egg_link(target_path, conda_source_root): conda_egg_link_contents = conda_source_root + os.linesep if isfile(target_path): - with open(target_path, 'rb') as fh: + with open(target_path, "rb") as fh: conda_egg_link_contents_old = fh.read() else: conda_egg_link_contents_old = "" if conda_egg_link_contents_old != conda_egg_link_contents: if context.verbosity: - print('\n', file=sys.stderr) + print("\n", file=sys.stderr) print(target_path, file=sys.stderr) - print(make_diff(conda_egg_link_contents_old, conda_egg_link_contents), file=sys.stderr) + print( + make_diff(conda_egg_link_contents_old, conda_egg_link_contents), + file=sys.stderr, + ) if not context.dry_run: - with open(target_path, 'wb') as fh: + with open(target_path, "wb") as fh: fh.write(ensure_utf8_encoding(conda_egg_link_contents)) return Result.MODIFIED else: @@ -1664,16 +1868,22 @@ def modify_easy_install_pth(target_path, conda_source_root): return Result.NO_CHANGE ln_end = os.sep + "conda" - old_contents_lines = tuple(ln for ln in old_contents_lines if not ln.endswith(ln_end)) - new_contents = (easy_install_new_line + os.linesep + - os.linesep.join(old_contents_lines) + os.linesep) + old_contents_lines = tuple( + ln for ln in old_contents_lines if not ln.endswith(ln_end) + ) + new_contents = ( + easy_install_new_line + + os.linesep + + os.linesep.join(old_contents_lines) + + os.linesep + ) if context.verbosity: - print('\n', file=sys.stderr) + print("\n", file=sys.stderr) print(target_path, file=sys.stderr) print(make_diff(old_contents, new_contents), file=sys.stderr) if not context.dry_run: - with open(target_path, 'wb') as fh: + with open(target_path, "wb") as fh: fh.write(ensure_utf8_encoding(new_contents)) return Result.MODIFIED @@ -1687,25 +1897,30 @@ def make_dev_egg_info_file(target_path): else: old_contents = "" - new_contents = dals(""" + new_contents = ( + dals( + """ Metadata-Version: 1.1 Name: conda Version: %s Platform: UNKNOWN Summary: OS-agnostic, system-level binary package manager. 
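
Stepping back from this hunk: nearly every change in the file is mechanical black output. Single quotes become double quotes, calls that exceed the line limit explode to one argument per line, and black's "magic trailing comma" keeps them exploded on future runs. Roughly, for a made-up example:

```python
def greet(first: str, last: str, punctuation: str = "!") -> str:
    return f"Hello, {first} {last}{punctuation}"


# before black, the author may have written one long call:
#   message = greet("Ada", "Lovelace", punctuation="!!")
# once black wraps it, the trailing comma pins the one-arg-per-line layout:
message = greet(
    "Ada",
    "Lovelace",
    punctuation="!!",
)
print(message)
```
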
- """) % CONDA_VERSION + """ + ) + % CONDA_VERSION + ) if old_contents == new_contents: return Result.NO_CHANGE if context.verbosity: - print('\n', file=sys.stderr) + print("\n", file=sys.stderr) print(target_path, file=sys.stderr) print(make_diff(old_contents, new_contents), file=sys.stderr) if not context.dry_run: if lexists(target_path): rm_rf(target_path) - with open(target_path, 'w') as fh: + with open(target_path, "w") as fh: fh.write(new_contents) return Result.MODIFIED @@ -1714,8 +1929,9 @@ def make_dev_egg_info_file(target_path): # helper functions # ##################################################### + def make_diff(old, new): - return '\n'.join(unified_diff(old.splitlines(), new.splitlines())) + return "\n".join(unified_diff(old.splitlines(), new.splitlines())) def _get_python_info(prefix): @@ -1729,8 +1945,9 @@ def _get_python_info(prefix): else: # pragma: no cover raise ValueError("No python version information available.") - site_packages_dir = join(prefix, - win_path_ok(get_python_site_packages_short_path(python_version))) + site_packages_dir = join( + prefix, win_path_ok(get_python_site_packages_short_path(python_version)) + ) return python_exe, python_version, site_packages_dir diff --git a/conda/core/link.py b/conda/core/link.py index 38b86513b6f..d6151ae0b94 100644 --- a/conda/core/link.py +++ b/conda/core/link.py @@ -1,25 +1,17 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import itertools +import os +import sys +import warnings from collections import defaultdict, namedtuple from itertools import chain from logging import getLogger -import os from os.path import basename, dirname, isdir, join -import sys from pathlib import Path -from traceback import format_exception_only from textwrap import indent -import warnings +from traceback import format_exception_only -from .package_cache_data import PackageCacheData -from .path_actions import (CompileMultiPycAction, CreateNonadminAction, CreatePrefixRecordAction, - CreatePythonEntryPointAction, LinkPathAction, MakeMenuAction, - RegisterEnvironmentLocationAction, RemoveLinkedPackageRecordAction, - RemoveMenuAction, UnlinkPathAction, UnregisterEnvironmentLocationAction, - UpdateHistoryAction, AggregateCompileMultiPycAction) -from .prefix_data import PrefixData, get_python_version_for_prefix from .. 
import CondaError, CondaMultiError, conda_signal_handler from ..auxlib.collection import first from ..auxlib.ish import dals @@ -27,30 +19,67 @@ from ..base.context import context from ..cli.common import confirm_yn from ..common.compat import ensure_text_type, on_win -from ..common.io import Spinner, dashlist, time_recorder -from ..common.io import DummyExecutor, ThreadLimitedThreadPoolExecutor -from ..common.path import (explode_directories, get_all_directories, get_major_minor_version, - get_python_site_packages_short_path) +from ..common.io import ( + DummyExecutor, + Spinner, + ThreadLimitedThreadPoolExecutor, + dashlist, + time_recorder, +) +from ..common.path import ( + explode_directories, + get_all_directories, + get_major_minor_version, + get_python_site_packages_short_path, +) from ..common.signals import signal_handler -from ..exceptions import (DisallowedPackageError, EnvironmentNotWritableError, - KnownPackageClobberError, LinkError, RemoveError, - SharedLinkPathClobberError, UnknownPackageClobberError, maybe_raise, - CondaSystemExit) +from ..exceptions import ( + CondaSystemExit, + DisallowedPackageError, + EnvironmentNotWritableError, + KnownPackageClobberError, + LinkError, + RemoveError, + SharedLinkPathClobberError, + UnknownPackageClobberError, + maybe_raise, +) from ..gateways.disk import mkdir_p from ..gateways.disk.delete import rm_rf from ..gateways.disk.read import isfile, lexists, read_package_info -from ..gateways.disk.test import hardlink_supported, is_conda_environment, softlink_supported +from ..gateways.disk.test import ( + hardlink_supported, + is_conda_environment, + softlink_supported, +) from ..gateways.subprocess import subprocess_call from ..models.enums import LinkType from ..models.version import VersionOrder from ..resolve import MatchSpec from ..utils import get_comspec, human_bytes, wrap_subprocess_call +from .package_cache_data import PackageCacheData +from .path_actions import ( + AggregateCompileMultiPycAction, + CompileMultiPycAction, + CreateNonadminAction, + CreatePrefixRecordAction, + CreatePythonEntryPointAction, + LinkPathAction, + MakeMenuAction, + RegisterEnvironmentLocationAction, + RemoveLinkedPackageRecordAction, + RemoveMenuAction, + UnlinkPathAction, + UnregisterEnvironmentLocationAction, + UpdateHistoryAction, +) +from .prefix_data import PrefixData, get_python_version_for_prefix log = getLogger(__name__) def determine_link_type(extracted_package_dir, target_prefix): - source_test_file = join(extracted_package_dir, 'info', 'index.json') + source_test_file = join(extracted_package_dir, "info", "index.json") if context.always_copy: return LinkType.copy if context.always_softlink: @@ -64,9 +93,10 @@ def determine_link_type(extracted_package_dir, target_prefix): def make_unlink_actions(transaction_context, target_prefix, prefix_record): # no side effects in this function! 
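
The import shuffle at the top of conda/core/link.py is isort at work: bare `import x` lines first, then `from x import ...` lines, alphabetized within each group, stdlib before package-local, with long from-imports wrapped in black-compatible parentheses. Note also how the relative `.package_cache_data` / `.path_actions` imports moved below the `..`-level ones; that ordering is isort's default for local imports. The resulting shape, on stdlib modules for illustration:

```python
# stdlib: bare imports first, alphabetized
import os
import sys

# stdlib: from-imports follow, also alphabetized by module
from collections import defaultdict
from logging import getLogger

# a from-import past the line limit gets parentheses, one name per line
from os.path import (
    basename,
    dirname,
    join,
)

log = getLogger(__name__)
```
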
- unlink_path_actions = tuple(UnlinkPathAction(transaction_context, prefix_record, - target_prefix, trgt) - for trgt in prefix_record.files) + unlink_path_actions = tuple( + UnlinkPathAction(transaction_context, prefix_record, target_prefix, trgt) + for trgt in prefix_record.files + ) try: extracted_package_dir = basename(prefix_record.extracted_package_dir) @@ -88,9 +118,12 @@ def make_unlink_actions(transaction_context, target_prefix, prefix_record): _all_d = get_all_directories(axn.target_short_path for axn in unlink_path_actions) all_directories = sorted(explode_directories(_all_d), reverse=True) - directory_remove_actions = tuple(UnlinkPathAction(transaction_context, prefix_record, - target_prefix, d, LinkType.directory) - for d in all_directories) + directory_remove_actions = tuple( + UnlinkPathAction( + transaction_context, prefix_record, target_prefix, d, LinkType.directory + ) + for d in all_directories + ) # unregister_private_package_actions = UnregisterPrivateEnvAction.create_actions( # transaction_context, package_cache_record, target_prefix @@ -108,89 +141,114 @@ def match_specs_to_dists(packages_info_to_link, specs): matched_specs = [None for _ in range(len(packages_info_to_link))] for spec in specs or (): spec = MatchSpec(spec) - idx = next((q for q, pkg_info in enumerate(packages_info_to_link) - if pkg_info.repodata_record.name == spec.name), - None) + idx = next( + ( + q + for q, pkg_info in enumerate(packages_info_to_link) + if pkg_info.repodata_record.name == spec.name + ), + None, + ) if idx is not None: matched_specs[idx] = spec return tuple(matched_specs) -PrefixSetup = namedtuple('PrefixSetup', ( - 'target_prefix', - 'unlink_precs', - 'link_precs', - 'remove_specs', - 'update_specs', - 'neutered_specs' -)) - -PrefixActionGroup = namedtuple('PrefixActionGroup', ( - 'remove_menu_action_groups', - 'unlink_action_groups', - 'unregister_action_groups', - 'link_action_groups', - 'register_action_groups', - 'compile_action_groups', - 'make_menu_action_groups', - 'entry_point_action_groups', - 'prefix_record_groups', -)) +PrefixSetup = namedtuple( + "PrefixSetup", + ( + "target_prefix", + "unlink_precs", + "link_precs", + "remove_specs", + "update_specs", + "neutered_specs", + ), +) + +PrefixActionGroup = namedtuple( + "PrefixActionGroup", + ( + "remove_menu_action_groups", + "unlink_action_groups", + "unregister_action_groups", + "link_action_groups", + "register_action_groups", + "compile_action_groups", + "make_menu_action_groups", + "entry_point_action_groups", + "prefix_record_groups", + ), +) # each PrefixGroup item is a sequence of ActionGroups -ActionGroup = namedtuple('ActionGroup', ( - 'type', - 'pkg_data', - 'actions', - 'target_prefix', -)) - -ChangeReport = namedtuple("ChangeReport", ( - "prefix", - "specs_to_remove", - "specs_to_add", - "removed_precs", - "new_precs", - "updated_precs", - "downgraded_precs", - "superseded_precs", - "fetch_precs", -)) +ActionGroup = namedtuple( + "ActionGroup", + ( + "type", + "pkg_data", + "actions", + "target_prefix", + ), +) + +ChangeReport = namedtuple( + "ChangeReport", + ( + "prefix", + "specs_to_remove", + "specs_to_add", + "removed_precs", + "new_precs", + "updated_precs", + "downgraded_precs", + "superseded_precs", + "fetch_precs", + ), +) class UnlinkLinkTransaction: - def __init__(self, *setups): self.prefix_setups = {stp.target_prefix: stp for stp in setups} self.prefix_action_groups = {} for stp in self.prefix_setups.values(): - log.info("initializing UnlinkLinkTransaction with\n" - " target_prefix: %s\n" - " 
unlink_precs:\n" - " %s\n" - " link_precs:\n" - " %s\n", - stp.target_prefix, - '\n '.join(prec.dist_str() for prec in stp.unlink_precs), - '\n '.join(prec.dist_str() for prec in stp.link_precs)) + log.info( + "initializing UnlinkLinkTransaction with\n" + " target_prefix: %s\n" + " unlink_precs:\n" + " %s\n" + " link_precs:\n" + " %s\n", + stp.target_prefix, + "\n ".join(prec.dist_str() for prec in stp.unlink_precs), + "\n ".join(prec.dist_str() for prec in stp.link_precs), + ) self._pfe = None self._prepared = False self._verified = False # this can be CPU-bound. Use ProcessPoolExecutor. - self.verify_executor = (DummyExecutor() if context.debug or context.verify_threads == 1 - else ThreadLimitedThreadPoolExecutor(context.verify_threads)) + self.verify_executor = ( + DummyExecutor() + if context.debug or context.verify_threads == 1 + else ThreadLimitedThreadPoolExecutor(context.verify_threads) + ) # this is more I/O bound. Use ThreadPoolExecutor. - self.execute_executor = (DummyExecutor() if context.debug or context.execute_threads == 1 - else ThreadLimitedThreadPoolExecutor(context.execute_threads)) + self.execute_executor = ( + DummyExecutor() + if context.debug or context.execute_threads == 1 + else ThreadLimitedThreadPoolExecutor(context.execute_threads) + ) @property def nothing_to_do(self): - return ( - not any((stp.unlink_precs or stp.link_precs) for stp in self.prefix_setups.values()) - and all(is_conda_environment(stp.target_prefix) - for stp in self.prefix_setups.values()) + return not any( + (stp.unlink_precs or stp.link_precs) for stp in self.prefix_setups.values() + ) and all( + is_conda_environment(stp.target_prefix) + for stp in self.prefix_setups.values() ) def download_and_extract(self): @@ -210,13 +268,21 @@ def prepare(self): self.transaction_context = {} - with Spinner("Preparing transaction", not context.verbosity and not context.quiet, - context.json): + with Spinner( + "Preparing transaction", + not context.verbosity and not context.quiet, + context.json, + ): for stp in self.prefix_setups.values(): - grps = self._prepare(self.transaction_context, stp.target_prefix, - stp.unlink_precs, stp.link_precs, - stp.remove_specs, stp.update_specs, - stp.neutered_specs) + grps = self._prepare( + self.transaction_context, + stp.target_prefix, + stp.unlink_precs, + stp.link_precs, + stp.remove_specs, + stp.update_specs, + stp.neutered_specs, + ) self.prefix_action_groups[stp.target_prefix] = PrefixActionGroup(*grps) self._prepared = True @@ -233,7 +299,9 @@ def verify(self): return with Spinner( - "Verifying transaction", not context.verbosity and not context.quiet, context.json + "Verifying transaction", + not context.verbosity and not context.quiet, + context.json, ): exceptions = self._verify(self.prefix_setups, self.prefix_action_groups) if exceptions: @@ -246,7 +314,10 @@ def verify(self): try: self._verify_pre_link_message( itertools.chain( - *(act.link_action_groups for act in self.prefix_action_groups.values()) + *( + act.link_action_groups + for act in self.prefix_action_groups.values() + ) ) ) except CondaSystemExit: @@ -260,7 +331,9 @@ def _verify_pre_link_message(self, all_link_groups): prelink_msg_dir = ( Path(act.pkg_data.extracted_package_dir) / "info" / "prelink_messages" ) - all_msg_subdir = list(item for item in prelink_msg_dir.glob("**/*") if item.is_file()) + all_msg_subdir = list( + item for item in prelink_msg_dir.glob("**/*") if item.is_file() + ) if prelink_msg_dir.is_dir() and all_msg_subdir: print("\n\nThe following PRELINK MESSAGES are INCLUDED:\n\n") 
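
On the executor selection above: under debug, or when the configured thread count is 1, the transaction swaps its thread pools for a serial DummyExecutor, and every call site keeps using .map() unchanged. A minimal stand-in, loosely modeled on the real class in conda.common.io (signatures here are illustrative, not conda's exact API):

```python
from concurrent.futures import ThreadPoolExecutor


class DummyExecutor:
    """Executor-shaped object that runs work inline; easy to step through."""

    def map(self, func, *iterables):
        for args in zip(*iterables):
            yield func(*args)


def pick_executor(debug: bool, threads: int):
    if debug or threads == 1:
        return DummyExecutor()
    return ThreadPoolExecutor(max_workers=threads)


executor = pick_executor(debug=True, threads=4)
print(list(executor.map(lambda n: n * n, range(5))))
```
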
flag_pre_link = True @@ -280,36 +353,50 @@ def execute(self): try: # innermost dict.values() is an iterable of PrefixActionGroup namedtuple # zip() is an iterable of each PrefixActionGroup namedtuple key - self._execute(tuple(chain(*chain(*zip(*self.prefix_action_groups.values()))))) + self._execute( + tuple(chain(*chain(*zip(*self.prefix_action_groups.values())))) + ) finally: - rm_rf(self.transaction_context['temp_dir']) + rm_rf(self.transaction_context["temp_dir"]) def _get_pfe(self): from .package_cache_data import ProgressiveFetchExtract + if self._pfe is not None: pfe = self._pfe elif not self.prefix_setups: self._pfe = pfe = ProgressiveFetchExtract(()) else: link_precs = set( - chain.from_iterable(stp.link_precs for stp in self.prefix_setups.values()) + chain.from_iterable( + stp.link_precs for stp in self.prefix_setups.values() + ) ) self._pfe = pfe = ProgressiveFetchExtract(link_precs) return pfe @classmethod - def _prepare(cls, transaction_context, target_prefix, unlink_precs, link_precs, - remove_specs, update_specs, neutered_specs): - + def _prepare( + cls, + transaction_context, + target_prefix, + unlink_precs, + link_precs, + remove_specs, + update_specs, + neutered_specs, + ): # make sure prefix directory exists if not isdir(target_prefix): try: mkdir_p(target_prefix) except OSError as e: log.debug(repr(e)) - raise CondaError("Unable to create prefix directory '%s'.\n" - "Check that you have sufficient permissions." - "" % target_prefix) + raise CondaError( + "Unable to create prefix directory '%s'.\n" + "Check that you have sufficient permissions." + "" % target_prefix + ) # gather information from disk and caches prefix_data = PrefixData(target_prefix) @@ -317,88 +404,123 @@ def _prepare(cls, transaction_context, target_prefix, unlink_precs, link_precs, # NOTE: load_meta can return None # TODO: figure out if this filter shouldn't be an assert not None prefix_recs_to_unlink = tuple(lpd for lpd in prefix_recs_to_unlink if lpd) - pkg_cache_recs_to_link = tuple(PackageCacheData.get_entry_to_link(prec) - for prec in link_precs) + pkg_cache_recs_to_link = tuple( + PackageCacheData.get_entry_to_link(prec) for prec in link_precs + ) assert all(pkg_cache_recs_to_link) - packages_info_to_link = tuple(read_package_info(prec, pcrec) - for prec, pcrec in zip(link_precs, pkg_cache_recs_to_link)) + packages_info_to_link = tuple( + read_package_info(prec, pcrec) + for prec, pcrec in zip(link_precs, pkg_cache_recs_to_link) + ) - link_types = tuple(determine_link_type(pkg_info.extracted_package_dir, target_prefix) - for pkg_info in packages_info_to_link) + link_types = tuple( + determine_link_type(pkg_info.extracted_package_dir, target_prefix) + for pkg_info in packages_info_to_link + ) # make all the path actions # no side effects allowed when instantiating these action objects - python_version = cls._get_python_version(target_prefix, - prefix_recs_to_unlink, - packages_info_to_link) - transaction_context['target_python_version'] = python_version + python_version = cls._get_python_version( + target_prefix, prefix_recs_to_unlink, packages_info_to_link + ) + transaction_context["target_python_version"] = python_version sp = get_python_site_packages_short_path(python_version) - transaction_context['target_site_packages_short_path'] = sp + transaction_context["target_site_packages_short_path"] = sp - transaction_context['temp_dir'] = join(target_prefix, '.condatmp') + transaction_context["temp_dir"] = join(target_prefix, ".condatmp") remove_menu_action_groups = [] unlink_action_groups = [] for 
prefix_rec in prefix_recs_to_unlink: - unlink_action_groups.append(ActionGroup( - 'unlink', - prefix_rec, - make_unlink_actions(transaction_context, target_prefix, prefix_rec), - target_prefix)) - - remove_menu_action_groups.append(ActionGroup( - 'remove_menus', - prefix_rec, - RemoveMenuAction.create_actions( - transaction_context, prefix_rec, target_prefix), - target_prefix)) + unlink_action_groups.append( + ActionGroup( + "unlink", + prefix_rec, + make_unlink_actions(transaction_context, target_prefix, prefix_rec), + target_prefix, + ) + ) + + remove_menu_action_groups.append( + ActionGroup( + "remove_menus", + prefix_rec, + RemoveMenuAction.create_actions( + transaction_context, prefix_rec, target_prefix + ), + target_prefix, + ) + ) if unlink_action_groups: - axns = UnregisterEnvironmentLocationAction(transaction_context, target_prefix), - unregister_action_groups = [ActionGroup('unregister', None, axns, target_prefix)] + axns = ( + UnregisterEnvironmentLocationAction(transaction_context, target_prefix), + ) + unregister_action_groups = [ + ActionGroup("unregister", None, axns, target_prefix) + ] else: unregister_action_groups = () - matchspecs_for_link_dists = match_specs_to_dists(packages_info_to_link, update_specs) + matchspecs_for_link_dists = match_specs_to_dists( + packages_info_to_link, update_specs + ) link_action_groups = [] entry_point_action_groups = [] compile_action_groups = [] make_menu_action_groups = [] record_axns = [] - for pkg_info, lt, spec in zip(packages_info_to_link, link_types, - matchspecs_for_link_dists): + for pkg_info, lt, spec in zip( + packages_info_to_link, link_types, matchspecs_for_link_dists + ): link_ag = ActionGroup( - 'link', + "link", pkg_info, - cls._make_link_actions(transaction_context, pkg_info, - target_prefix, lt, spec), - target_prefix) + cls._make_link_actions( + transaction_context, pkg_info, target_prefix, lt, spec + ), + target_prefix, + ) link_action_groups.append(link_ag) entry_point_ag = ActionGroup( - 'entry_point', + "entry_point", pkg_info, cls._make_entry_point_actions( - transaction_context, pkg_info, target_prefix, - lt, spec, link_action_groups), - target_prefix) + transaction_context, + pkg_info, + target_prefix, + lt, + spec, + link_action_groups, + ), + target_prefix, + ) entry_point_action_groups.append(entry_point_ag) compile_ag = ActionGroup( - 'compile', + "compile", pkg_info, cls._make_compile_actions( - transaction_context, pkg_info, target_prefix, - lt, spec, link_action_groups), - target_prefix) + transaction_context, + pkg_info, + target_prefix, + lt, + spec, + link_action_groups, + ), + target_prefix, + ) compile_action_groups.append(compile_ag) make_menu_ag = ActionGroup( - 'make_menus', + "make_menus", pkg_info, MakeMenuAction.create_actions( - transaction_context, pkg_info, target_prefix, lt), - target_prefix) + transaction_context, pkg_info, target_prefix, lt + ), + target_prefix, + ) make_menu_action_groups.append(make_menu_ag) all_link_path_actions = ( @@ -407,20 +529,36 @@ def _prepare(cls, transaction_context, target_prefix, unlink_precs, link_precs, *entry_point_ag.actions, *make_menu_ag.actions, ) - record_axns.extend(CreatePrefixRecordAction.create_actions( - transaction_context, pkg_info, target_prefix, lt, spec, all_link_path_actions)) + record_axns.extend( + CreatePrefixRecordAction.create_actions( + transaction_context, + pkg_info, + target_prefix, + lt, + spec, + all_link_path_actions, + ) + ) - prefix_record_groups = [ActionGroup('record', None, record_axns, target_prefix)] + prefix_record_groups 
= [ActionGroup("record", None, record_axns, target_prefix)] # We're post solve here. The update_specs are explicit requests. We need to neuter # any historic spec that was neutered prior to the solve. history_actions = UpdateHistoryAction.create_actions( - transaction_context, target_prefix, remove_specs, update_specs, neutered_specs + transaction_context, + target_prefix, + remove_specs, + update_specs, + neutered_specs, + ) + register_actions = ( + RegisterEnvironmentLocationAction(transaction_context, target_prefix), ) - register_actions = RegisterEnvironmentLocationAction(transaction_context, target_prefix), - register_action_groups = [ActionGroup('register', None, - register_actions + history_actions, - target_prefix)] + register_action_groups = [ + ActionGroup( + "register", None, register_actions + history_actions, target_prefix + ) + ] return PrefixActionGroup( remove_menu_action_groups, unlink_action_groups, @@ -436,7 +574,9 @@ def _prepare(cls, transaction_context, target_prefix, unlink_precs, link_precs, @staticmethod def _verify_individual_level(prefix_action_group): all_actions = chain.from_iterable( - axngroup.actions for action_groups in prefix_action_group for axngroup in action_groups + axngroup.actions + for action_groups in prefix_action_group + for axngroup in action_groups ) # run all per-action (per-package) verify methods @@ -448,7 +588,9 @@ def _verify_individual_level(prefix_action_group): continue error_result = axn.verify() if error_result: - formatted_error = ''.join(format_exception_only(type(error_result), error_result)) + formatted_error = "".join( + format_exception_only(type(error_result), error_result) + ) log.debug("Verification error in action %s\n%s", axn, formatted_error) error_results.append(error_result) return error_results @@ -481,10 +623,12 @@ def _verify_prefix_level(target_prefix_AND_prefix_action_group_tuple): } # we can get all of the paths being linked by looking only at the # CreateLinkedPackageRecordAction actions - create_lpr_actions = (axn - for grp in prefix_record_groups - for axn in grp.actions - if isinstance(axn, CreatePrefixRecordAction)) + create_lpr_actions = ( + axn + for grp in prefix_record_groups + for axn in grp.actions + if isinstance(axn, CreatePrefixRecordAction) + ) error_results = [] # Verification 1. each path either doesn't already exist in the prefix, or will be unlinked @@ -508,32 +652,44 @@ def _verify_prefix_level(target_prefix_AND_prefix_action_group_tuple): if path not in unlink_paths and lexists(join(target_prefix, path)): # we have a collision; at least try to figure out where it came from colliding_prefix_rec = first( - (prefix_rec for prefix_rec in - PrefixData(target_prefix).iter_records()), - key=lambda prefix_rec: path in prefix_rec.files + ( + prefix_rec + for prefix_rec in PrefixData( + target_prefix + ).iter_records() + ), + key=lambda prefix_rec: path in prefix_rec.files, ) if colliding_prefix_rec: - error_results.append(KnownPackageClobberError( - path, - axn.package_info.repodata_record.dist_str(), - colliding_prefix_rec.dist_str(), - context, - )) + error_results.append( + KnownPackageClobberError( + path, + axn.package_info.repodata_record.dist_str(), + colliding_prefix_rec.dist_str(), + context, + ) + ) else: - error_results.append(UnknownPackageClobberError( - path, - axn.package_info.repodata_record.dist_str(), - context, - )) + error_results.append( + UnknownPackageClobberError( + path, + axn.package_info.repodata_record.dist_str(), + context, + ) + ) # Verification 2. 
there's only a single instance of each path for path, axns in link_paths_dict.items(): if len(axns) > 1: - error_results.append(SharedLinkPathClobberError( - path, - tuple(axn.package_info.repodata_record.dist_str() for axn in axns), - context, - )) + error_results.append( + SharedLinkPathClobberError( + path, + tuple( + axn.package_info.repodata_record.dist_str() for axn in axns + ), + context, + ) + ) return error_results @staticmethod @@ -545,27 +701,39 @@ def _verify_transaction_level(prefix_setups): # 5. make sure conda-meta/history for each prefix is writable # TODO: Verification 4 - conda_prefixes = (join(context.root_prefix, 'envs', '_conda_'), context.root_prefix) - conda_setups = tuple(setup for setup in prefix_setups.values() - if setup.target_prefix in conda_prefixes) + conda_prefixes = ( + join(context.root_prefix, "envs", "_conda_"), + context.root_prefix, + ) + conda_setups = tuple( + setup + for setup in prefix_setups.values() + if setup.target_prefix in conda_prefixes + ) - conda_unlinked = any(prec.name == 'conda' - for setup in conda_setups - for prec in setup.unlink_precs) + conda_unlinked = any( + prec.name == "conda" + for setup in conda_setups + for prec in setup.unlink_precs + ) conda_prec, conda_final_setup = next( - ((prec, setup) - for setup in conda_setups - for prec in setup.link_precs - if prec.name == 'conda'), - (None, None) + ( + (prec, setup) + for setup in conda_setups + for prec in setup.link_precs + if prec.name == "conda" + ), + (None, None), ) if conda_unlinked and conda_final_setup is None: # means conda is being unlinked and not re-linked anywhere # this should never be able to be skipped, even with --force - yield RemoveError("This operation will remove conda without replacing it with\n" - "another version of conda.") + yield RemoveError( + "This operation will remove conda without replacing it with\n" + "another version of conda." + ) if conda_final_setup is None: # means we're not unlinking then linking a new package, so look up current conda record @@ -575,26 +743,36 @@ def _verify_transaction_level(prefix_setups): pkg_names_being_lnkd = () pkg_names_being_unlnkd = () conda_linked_depends = next( - (record.depends for record in pd.iter_records() if record.name == 'conda'), - () + ( + record.depends + for record in pd.iter_records() + if record.name == "conda" + ), + (), ) else: conda_final_prefix = conda_final_setup.target_prefix pd = PrefixData(conda_final_prefix) pkg_names_already_lnkd = tuple(rec.name for rec in pd.iter_records()) - pkg_names_being_lnkd = tuple(prec.name for prec in conda_final_setup.link_precs or ()) - pkg_names_being_unlnkd = tuple(prec.name for prec in conda_final_setup.unlink_precs - or ()) + pkg_names_being_lnkd = tuple( + prec.name for prec in conda_final_setup.link_precs or () + ) + pkg_names_being_unlnkd = tuple( + prec.name for prec in conda_final_setup.unlink_precs or () + ) conda_linked_depends = conda_prec.depends if conda_final_prefix in prefix_setups: for conda_dependency in conda_linked_depends: dep_name = MatchSpec(conda_dependency).name if dep_name not in pkg_names_being_lnkd and ( - dep_name not in pkg_names_already_lnkd or - dep_name in pkg_names_being_unlnkd): - yield RemoveError("'%s' is a dependency of conda and cannot be removed from\n" - "conda's operating environment." % dep_name) + dep_name not in pkg_names_already_lnkd + or dep_name in pkg_names_being_unlnkd + ): + yield RemoveError( + "'%s' is a dependency of conda and cannot be removed from\n" + "conda's operating environment." 
% dep_name + ) # Verification 3. enforce disallowed_packages disallowed = tuple(MatchSpec(s) for s in context.disallowed_packages) @@ -623,47 +801,64 @@ def _verify_transaction_level(prefix_setups): def _verify(self, prefix_setups, prefix_action_groups): transaction_exceptions = tuple( - exc for exc in UnlinkLinkTransaction._verify_transaction_level(prefix_setups) if exc + exc + for exc in UnlinkLinkTransaction._verify_transaction_level(prefix_setups) + if exc ) if transaction_exceptions: return transaction_exceptions exceptions = [] - for exc in self.verify_executor.map(UnlinkLinkTransaction._verify_individual_level, - prefix_action_groups.values()): + for exc in self.verify_executor.map( + UnlinkLinkTransaction._verify_individual_level, + prefix_action_groups.values(), + ): if exc: exceptions.extend(exc) for exc in self.verify_executor.map( - UnlinkLinkTransaction._verify_prefix_level, - prefix_action_groups.items()): + UnlinkLinkTransaction._verify_prefix_level, prefix_action_groups.items() + ): if exc: exceptions.extend(exc) return exceptions def _execute(self, all_action_groups): # unlink unlink_action_groups and unregister_action_groups - unlink_actions = tuple(group for group in all_action_groups if group.type == "unlink") + unlink_actions = tuple( + group for group in all_action_groups if group.type == "unlink" + ) # link unlink_action_groups and register_action_groups - link_actions = list(group for group in all_action_groups if group.type == "link") - compile_actions = list(group for group in all_action_groups if group.type == "compile") - entry_point_actions = list(group for group in all_action_groups - if group.type == "entry_point") - record_actions = list(group for group in all_action_groups if group.type == "record") - make_menu_actions = list(group for group in all_action_groups - if group.type == "make_menus") - remove_menu_actions = list(group for group in all_action_groups - if group.type == "remove_menus") + link_actions = list( + group for group in all_action_groups if group.type == "link" + ) + compile_actions = list( + group for group in all_action_groups if group.type == "compile" + ) + entry_point_actions = list( + group for group in all_action_groups if group.type == "entry_point" + ) + record_actions = list( + group for group in all_action_groups if group.type == "record" + ) + make_menu_actions = list( + group for group in all_action_groups if group.type == "make_menus" + ) + remove_menu_actions = list( + group for group in all_action_groups if group.type == "remove_menus" + ) with signal_handler(conda_signal_handler), time_recorder("unlink_link_execute"): exceptions = [] - with Spinner("Executing transaction", not context.verbosity and not context.quiet, - context.json): - + with Spinner( + "Executing transaction", + not context.verbosity and not context.quiet, + context.json, + ): # Execute unlink actions - for (group, register_group, install_side) in ( - (unlink_actions, "unregister", False), - (link_actions, "register", True)): - + for group, register_group, install_side in ( + (unlink_actions, "unregister", False), + (link_actions, "register", True), + ): if not install_side: # uninstalling menus must happen prior to unlinking, or else they might # call something that isn't there anymore @@ -671,17 +866,20 @@ def _execute(self, all_action_groups): UnlinkLinkTransaction._execute_actions(axngroup) for axngroup in group: - is_unlink = axngroup.type == 'unlink' + is_unlink = axngroup.type == "unlink" target_prefix = axngroup.target_prefix prec = 
axngroup.pkg_data - run_script(target_prefix if is_unlink else prec.extracted_package_dir, - prec, - 'pre-unlink' if is_unlink else 'pre-link', - target_prefix) + run_script( + target_prefix if is_unlink else prec.extracted_package_dir, + prec, + "pre-unlink" if is_unlink else "pre-link", + target_prefix, + ) # parallel block 1: - for exc in self.execute_executor.map(UnlinkLinkTransaction._execute_actions, - group): + for exc in self.execute_executor.map( + UnlinkLinkTransaction._execute_actions, group + ): if exc: exceptions.append(exc) @@ -703,20 +901,34 @@ def _execute(self, all_action_groups): if install_side: composite_ag.extend(record_actions) # consolidate compile actions into one big'un for better efficiency - individual_actions = [axn for ag in compile_actions for axn in ag.actions] + individual_actions = [ + axn for ag in compile_actions for axn in ag.actions + ] if individual_actions: - composite = AggregateCompileMultiPycAction(*individual_actions) - composite_ag.append(ActionGroup('compile', None, [composite], - composite.target_prefix)) + composite = AggregateCompileMultiPycAction( + *individual_actions + ) + composite_ag.append( + ActionGroup( + "compile", + None, + [composite], + composite.target_prefix, + ) + ) # functions return None unless there was an exception - for exc in self.execute_executor.map(UnlinkLinkTransaction._execute_actions, - composite_ag): + for exc in self.execute_executor.map( + UnlinkLinkTransaction._execute_actions, composite_ag + ): if exc: exceptions.append(exc) # must do the register actions AFTER all link/unlink is done - register_actions = tuple(group for group in all_action_groups - if group.type == register_group) + register_actions = tuple( + group + for group in all_action_groups + if group.type == register_group + ) for axngroup in register_actions: exc = UnlinkLinkTransaction._execute_actions(axngroup) if exc: @@ -733,21 +945,25 @@ def _execute(self, all_action_groups): e = exceptions[0] axngroup = e.errors[1] - action, is_unlink = (None, axngroup.type == 'unlink') + action, is_unlink = (None, axngroup.type == "unlink") prec = axngroup.pkg_data if prec: log.error( "An error occurred while {} package '{}'.".format( - "uninstalling" if is_unlink else "installing", prec.dist_str() + "uninstalling" if is_unlink else "installing", + prec.dist_str(), ) ) # reverse all executed packages except the one that failed rollback_excs = [] if context.rollback_enabled: - with Spinner("Rolling back transaction", - not context.verbosity and not context.quiet, context.json): + with Spinner( + "Rolling back transaction", + not context.verbosity and not context.quiet, + context.json, + ): reverse_actions = reversed(tuple(all_action_groups)) for axngroup in reverse_actions: excs = UnlinkLinkTransaction._reverse_actions(axngroup) @@ -755,7 +971,11 @@ def _execute(self, all_action_groups): raise CondaMultiError( ( - *((e.errors[0], e.errors[2:]) if isinstance(e, CondaMultiError) else (e,)), + *( + (e.errors[0], e.errors[2:]) + if isinstance(e, CondaMultiError) + else (e,) + ), *rollback_excs, ) ) @@ -769,21 +989,25 @@ def _execute_actions(axngroup): target_prefix = axngroup.target_prefix prec = axngroup.pkg_data - conda_meta_dir = join(target_prefix, 'conda-meta') + conda_meta_dir = join(target_prefix, "conda-meta") if not isdir(conda_meta_dir): mkdir_p(conda_meta_dir) try: - if axngroup.type == 'unlink': - log.info("===> UNLINKING PACKAGE: %s <===\n" - " prefix=%s\n", - prec.dist_str(), target_prefix) + if axngroup.type == "unlink": + log.info( + "===> UNLINKING 
PACKAGE: %s <===\n" " prefix=%s\n", + prec.dist_str(), + target_prefix, + ) - elif axngroup.type == 'link': - log.info("===> LINKING PACKAGE: %s <===\n" - " prefix=%s\n" - " source=%s\n", - prec.dist_str(), target_prefix, prec.extracted_package_dir) + elif axngroup.type == "link": + log.info( + "===> LINKING PACKAGE: %s <===\n" " prefix=%s\n" " source=%s\n", + prec.dist_str(), + target_prefix, + prec.extracted_package_dir, + ) for action in axngroup.actions: action.execute() @@ -803,12 +1027,16 @@ def _execute_actions(axngroup): @staticmethod def _execute_post_link_actions(axngroup): target_prefix = axngroup.target_prefix - is_unlink = axngroup.type == 'unlink' + is_unlink = axngroup.type == "unlink" prec = axngroup.pkg_data if prec: try: - run_script(target_prefix, prec, 'post-unlink' if is_unlink else 'post-link', - activate=True) + run_script( + target_prefix, + prec, + "post-unlink" if is_unlink else "post-link", + activate=True, + ) except Exception as e: # this won't be a multi error # reverse this package reverse_excs = () @@ -829,19 +1057,25 @@ def _reverse_actions(axngroup, reverse_from_idx=-1): # reverse_from_idx = -1 means reverse all actions prec = axngroup.pkg_data - if axngroup.type == 'unlink': - log.info("===> REVERSING PACKAGE UNLINK: %s <===\n" - " prefix=%s\n", prec.dist_str(), target_prefix) + if axngroup.type == "unlink": + log.info( + "===> REVERSING PACKAGE UNLINK: %s <===\n" " prefix=%s\n", + prec.dist_str(), + target_prefix, + ) - elif axngroup.type == 'link': - log.info("===> REVERSING PACKAGE LINK: %s <===\n" - " prefix=%s\n", prec.dist_str(), target_prefix) + elif axngroup.type == "link": + log.info( + "===> REVERSING PACKAGE LINK: %s <===\n" " prefix=%s\n", + prec.dist_str(), + target_prefix, + ) exceptions = [] if reverse_from_idx < 0: reverse_actions = axngroup.actions else: - reverse_actions = axngroup.actions[:reverse_from_idx+1] + reverse_actions = axngroup.actions[: reverse_from_idx + 1] for axn_idx, action in reversed(tuple(enumerate(reverse_actions))): try: action.reverse() @@ -854,9 +1088,14 @@ def _reverse_actions(axngroup, reverse_from_idx=-1): def _get_python_version(target_prefix, pcrecs_to_unlink, packages_info_to_link): # this method determines the python version that will be present at the # end of the transaction - linking_new_python = next((package_info for package_info in packages_info_to_link - if package_info.repodata_record.name == 'python'), - None) + linking_new_python = next( + ( + package_info + for package_info in packages_info_to_link + if package_info.repodata_record.name == "python" + ), + None, + ) if linking_new_python: # is python being linked? we're done full_version = linking_new_python.repodata_record.version @@ -867,12 +1106,17 @@ def _get_python_version(target_prefix, pcrecs_to_unlink, packages_info_to_link): # is python already linked and not being unlinked? 
that's ok too linked_python_version = get_python_version_for_prefix(target_prefix) if linked_python_version: - find_python = (lnkd_pkg_data for lnkd_pkg_data in pcrecs_to_unlink - if lnkd_pkg_data.name == 'python') + find_python = ( + lnkd_pkg_data + for lnkd_pkg_data in pcrecs_to_unlink + if lnkd_pkg_data.name == "python" + ) unlinking_this_python = next(find_python, None) if unlinking_this_python is None: # python is not being unlinked - log.debug("found in current prefix python version %s", linked_python_version) + log.debug( + "found in current prefix python version %s", linked_python_version + ) return linked_python_version # there won't be any python in the finished environment @@ -880,9 +1124,19 @@ def _get_python_version(target_prefix, pcrecs_to_unlink, packages_info_to_link): return None @staticmethod - def _make_link_actions(transaction_context, package_info, target_prefix, requested_link_type, - requested_spec): - required_quad = transaction_context, package_info, target_prefix, requested_link_type + def _make_link_actions( + transaction_context, + package_info, + target_prefix, + requested_link_type, + requested_spec, + ): + required_quad = ( + transaction_context, + package_info, + target_prefix, + requested_link_type, + ) file_link_actions = LinkPathAction.create_file_link_actions(*required_quad) create_directory_actions = LinkPathAction.create_directory_actions( @@ -898,18 +1152,43 @@ def _make_link_actions(transaction_context, package_info, target_prefix, request ) @staticmethod - def _make_entry_point_actions(transaction_context, package_info, target_prefix, - requested_link_type, requested_spec, link_action_groups): - required_quad = transaction_context, package_info, target_prefix, requested_link_type + def _make_entry_point_actions( + transaction_context, + package_info, + target_prefix, + requested_link_type, + requested_spec, + link_action_groups, + ): + required_quad = ( + transaction_context, + package_info, + target_prefix, + requested_link_type, + ) return CreatePythonEntryPointAction.create_actions(*required_quad) @staticmethod - def _make_compile_actions(transaction_context, package_info, target_prefix, - requested_link_type, requested_spec, link_action_groups): - required_quad = transaction_context, package_info, target_prefix, requested_link_type - link_action_group = next(ag for ag in link_action_groups if ag.pkg_data == package_info) - return CompileMultiPycAction.create_actions(*required_quad, - file_link_actions=link_action_group.actions) + def _make_compile_actions( + transaction_context, + package_info, + target_prefix, + requested_link_type, + requested_spec, + link_action_groups, + ): + required_quad = ( + transaction_context, + package_info, + target_prefix, + requested_link_type, + ) + link_action_group = next( + ag for ag in link_action_groups if ag.pkg_data == package_info + ) + return CompileMultiPycAction.create_actions( + *required_quad, file_link_actions=link_action_group.actions + ) def _make_legacy_action_groups(self): # this code reverts json output for plan back to previous behavior @@ -924,17 +1203,18 @@ def _make_legacy_action_groups(self): if q == 0: self._pfe.prepare() download_urls = {axn.url for axn in self._pfe.cache_actions} - actions['FETCH'].extend(prec for prec in self._pfe.link_precs - if prec.url in download_urls) + actions["FETCH"].extend( + prec for prec in self._pfe.link_precs if prec.url in download_urls + ) - actions['PREFIX'] = setup.target_prefix + actions["PREFIX"] = setup.target_prefix for prec in setup.unlink_precs: - 
actions['UNLINK'].append(prec) + actions["UNLINK"].append(prec) for prec in setup.link_precs: # TODO (AV): maybe add warnings about unverified packages here; # be warned that doing so may break compatibility with other # applications. - actions['LINK'].append(prec) + actions["LINK"].append(prec) legacy_action_groups.append(actions) @@ -945,10 +1225,17 @@ def print_transaction_summary(self): download_urls = {axn.url for axn in self._pfe.cache_actions} - for actions, (prefix, stp) in zip(legacy_action_groups, self.prefix_setups.items()): - change_report = self._calculate_change_report(prefix, stp.unlink_precs, stp.link_precs, - download_urls, stp.remove_specs, - stp.update_specs) + for actions, (prefix, stp) in zip( + legacy_action_groups, self.prefix_setups.items() + ): + change_report = self._calculate_change_report( + prefix, + stp.unlink_precs, + stp.link_precs, + download_urls, + stp.remove_specs, + stp.update_specs, + ) change_report_str = self._change_report_str(change_report) print(ensure_text_type(change_report_str)) @@ -956,34 +1243,41 @@ def print_transaction_summary(self): def _change_report_str(self, change_report): # TODO (AV): add warnings about unverified packages in this function - builder = ['', '## Package Plan ##\n'] - builder.append(' environment location: %s' % change_report.prefix) - builder.append('') + builder = ["", "## Package Plan ##\n"] + builder.append(" environment location: %s" % change_report.prefix) + builder.append("") if change_report.specs_to_remove: - builder.append(' removed specs:%s' - % dashlist(sorted(str(s) for s in change_report.specs_to_remove), - indent=4)) - builder.append('') + builder.append( + " removed specs:%s" + % dashlist( + sorted(str(s) for s in change_report.specs_to_remove), indent=4 + ) + ) + builder.append("") if change_report.specs_to_add: - builder.append(' added / updated specs:%s' - % dashlist(sorted(str(s) for s in change_report.specs_to_add), - indent=4)) - builder.append('') + builder.append( + " added / updated specs:%s" + % dashlist(sorted(str(s) for s in change_report.specs_to_add), indent=4) + ) + builder.append("") def channel_filt(s): if context.show_channel_urls is False: - return '' + return "" if context.show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME: - return '' + return "" return s def print_dists(dists_extras): lines = [] fmt = " %-27s|%17s" - lines.append(fmt % ('package', 'build')) - lines.append(fmt % ('-' * 27, '-' * 17)) + lines.append(fmt % ("package", "build")) + lines.append(fmt % ("-" * 27, "-" * 17)) for prec, extra in dists_extras: - line = fmt % (strip_global(prec.namekey) + '-' + prec.version, prec.build) + line = fmt % ( + strip_global(prec.namekey) + "-" + prec.version, + prec.build, + ) if extra: line += extra lines.append(line) @@ -997,18 +1291,19 @@ def print_dists(dists_extras): disp_lst = [] total_download_bytes = 0 - for prec in sorted(change_report.fetch_precs, - key=lambda x: convert_namekey(x.namekey)): + for prec in sorted( + change_report.fetch_precs, key=lambda x: convert_namekey(x.namekey) + ): size = prec.size - extra = '%15s' % human_bytes(size) + extra = "%15s" % human_bytes(size) total_download_bytes += size schannel = channel_filt(str(prec.channel.canonical_name)) if schannel: - extra += ' ' + schannel + extra += " " + schannel disp_lst.append((prec, extra)) builder.extend(print_dists(disp_lst)) - builder.append(' ' * 4 + '-' * 60) + builder.append(" " * 4 + "-" * 60) builder.append(" " * 43 + "Total: %14s" % human_bytes(total_download_bytes)) def diff_strs(unlink_prec, 
link_prec): @@ -1035,7 +1330,7 @@ def diff_strs(unlink_prec, link_prec): builder_left.append(unlink_prec.version + "-" + unlink_prec.build) builder_right.append(link_prec.version + "-" + link_prec.build) - return ''.join(builder_left), ''.join(builder_right) + return "".join(builder_left), "".join(builder_right) def add_single(display_key, disp_str): if len(display_key) > 18: @@ -1062,7 +1357,9 @@ def add_double(display_key, left_str, right_str): builder.append("\nThe following packages will be REMOVED:\n") for namekey in sorted(change_report.removed_precs, key=convert_namekey): unlink_prec = change_report.removed_precs[namekey] - builder.append(f" {unlink_prec.name}-{unlink_prec.version}-{unlink_prec.build}") + builder.append( + f" {unlink_prec.name}-{unlink_prec.version}-{unlink_prec.build}" + ) if change_report.updated_precs: builder.append("\nThe following packages will be UPDATED:\n") @@ -1076,8 +1373,10 @@ def add_double(display_key, left_str, right_str): ) if change_report.superseded_precs: - builder.append("\nThe following packages will be SUPERSEDED " - "by a higher-priority channel:\n") + builder.append( + "\nThe following packages will be SUPERSEDED " + "by a higher-priority channel:\n" + ) for namekey in sorted(change_report.superseded_precs, key=convert_namekey): unlink_prec, link_prec = change_report.superseded_precs[namekey] left_str, right_str = diff_strs(unlink_prec, link_prec) @@ -1097,21 +1396,25 @@ def add_double(display_key, left_str, right_str): left_str, f"{right_str} {link_prec['metadata_signature_status']}", ) - builder.append('') - builder.append('') + builder.append("") + builder.append("") return "\n".join(builder) @staticmethod - def _calculate_change_report(prefix, unlink_precs, link_precs, download_urls, specs_to_remove, - specs_to_add): + def _calculate_change_report( + prefix, unlink_precs, link_precs, download_urls, specs_to_remove, specs_to_add + ): unlink_map = {prec.namekey: prec for prec in unlink_precs} link_map = {prec.namekey: prec for prec in link_precs} unlink_namekeys, link_namekeys = set(unlink_map), set(link_map) - removed_precs = {namekey: unlink_map[namekey] - for namekey in (unlink_namekeys - link_namekeys)} - new_precs = {namekey: link_map[namekey] - for namekey in (link_namekeys - unlink_namekeys)} + removed_precs = { + namekey: unlink_map[namekey] + for namekey in (unlink_namekeys - link_namekeys) + } + new_precs = { + namekey: link_map[namekey] for namekey in (link_namekeys - unlink_namekeys) + } # updated means a version increase, or a build number increase # downgraded means a version decrease, or build number decrease, but channel canonical_name @@ -1129,8 +1432,10 @@ def _calculate_change_report(prefix, unlink_precs, link_precs, download_urls, sp build_number_increases = link_prec.build_number > unlink_prec.build_number if link_vo == unlink_vo and build_number_increases or link_vo > unlink_vo: updated_precs[namekey] = (unlink_prec, link_prec) - elif (link_prec.channel.name == unlink_prec.channel.name - and link_prec.subdir == unlink_prec.subdir): + elif ( + link_prec.channel.name == unlink_prec.channel.name + and link_prec.subdir == unlink_prec.subdir + ): if link_prec == unlink_prec: # noarch: python packages are re-linked on a python version change # just leave them out of the package report @@ -1140,13 +1445,21 @@ def _calculate_change_report(prefix, unlink_precs, link_precs, download_urls, sp superseded_precs[namekey] = (unlink_prec, link_prec) fetch_precs = {prec for prec in link_precs if prec.url in download_urls} - 
change_report = ChangeReport(prefix, specs_to_remove, specs_to_add, removed_precs, - new_precs, updated_precs, downgraded_precs, superseded_precs, - fetch_precs) + change_report = ChangeReport( + prefix, + specs_to_remove, + specs_to_add, + removed_precs, + new_precs, + updated_precs, + downgraded_precs, + superseded_precs, + fetch_precs, + ) return change_report -def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False): +def run_script(prefix, prec, action="post-link", env_prefix=None, activate=False): """ call the post-link (or pre-unlink) script, and return True on success, False on failure @@ -1161,33 +1474,43 @@ def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False env = os.environ.copy() - if action == 'pre-link': # pragma: no cover + if action == "pre-link": # pragma: no cover # old no-arch support; deprecated is_old_noarch = False try: with open(path) as f: script_text = ensure_text_type(f.read()) - if ((on_win and "%PREFIX%\\python.exe %SOURCE_DIR%\\link.py" in script_text) - or "$PREFIX/bin/python $SOURCE_DIR/link.py" in script_text): + if ( + on_win and "%PREFIX%\\python.exe %SOURCE_DIR%\\link.py" in script_text + ) or "$PREFIX/bin/python $SOURCE_DIR/link.py" in script_text: is_old_noarch = True except Exception as e: log.debug(e, exc_info=True) - env['SOURCE_DIR'] = prefix + env["SOURCE_DIR"] = prefix if not is_old_noarch: - warnings.warn(dals(""" + warnings.warn( + dals( + """ Package %s uses a pre-link script. Pre-link scripts are potentially dangerous. This is because pre-link scripts have the ability to change the package contents in the package cache, and therefore modify the underlying files for already-created conda environments. Future versions of conda may deprecate and ignore pre-link scripts. 
- """) % prec.dist_str()) + """ + ) + % prec.dist_str() + ) script_caller = None if on_win: try: comspec = get_comspec() # fail early with KeyError if undefined except KeyError: - log.info("failed to run %s for %s due to COMSPEC KeyError", action, prec.dist_str()) + log.info( + "failed to run %s for %s due to COMSPEC KeyError", + action, + prec.dist_str(), + ) return False if activate: script_caller, command_args = wrap_subprocess_call( @@ -1198,9 +1521,9 @@ def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False ("@CALL", path), ) else: - command_args = [comspec, '/d', '/c', path] + command_args = [comspec, "/d", "/c", path] else: - shell_path = 'sh' if 'bsd' in sys.platform else 'bash' + shell_path = "sh" if "bsd" in sys.platform else "bash" if activate: script_caller, command_args = wrap_subprocess_call( context.root_prefix, @@ -1210,29 +1533,37 @@ def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False (".", path), ) else: - shell_path = 'sh' if 'bsd' in sys.platform else 'bash' + shell_path = "sh" if "bsd" in sys.platform else "bash" command_args = [shell_path, "-x", path] - env['ROOT_PREFIX'] = context.root_prefix - env['PREFIX'] = env_prefix or prefix - env['PKG_NAME'] = prec.name - env['PKG_VERSION'] = prec.version - env['PKG_BUILDNUM'] = prec.build_number - env['PATH'] = os.pathsep.join((dirname(path), env.get('PATH', ''))) - - log.debug("for %s at %s, executing script: $ %s", - prec.dist_str(), env['PREFIX'], ' '.join(command_args)) + env["ROOT_PREFIX"] = context.root_prefix + env["PREFIX"] = env_prefix or prefix + env["PKG_NAME"] = prec.name + env["PKG_VERSION"] = prec.version + env["PKG_BUILDNUM"] = prec.build_number + env["PATH"] = os.pathsep.join((dirname(path), env.get("PATH", ""))) + + log.debug( + "for %s at %s, executing script: $ %s", + prec.dist_str(), + env["PREFIX"], + " ".join(command_args), + ) try: - response = subprocess_call(command_args, env=env, path=dirname(path), raise_on_error=False) + response = subprocess_call( + command_args, env=env, path=dirname(path), raise_on_error=False + ) if response.rc != 0: m = messages(prefix) - if action in ('pre-link', 'post-link'): - if 'openssl' in prec.dist_str(): + if action in ("pre-link", "post-link"): + if "openssl" in prec.dist_str(): # this is a hack for conda-build string parsing in the conda_build/build.py # create_env function message = f"{action} failed for: {prec}" else: - message = dals(""" + message = ( + dals( + """ %s script failed for package %s location of failed script: %s ==> script messages <== @@ -1241,33 +1572,50 @@ def run_script(prefix, prec, action='post-link', env_prefix=None, activate=False stdout: %s stderr: %s return code: %s - """) % (action, prec.dist_str(), path, m or "", - response.stdout, response.stderr, response.rc) + """ + ) + % ( + action, + prec.dist_str(), + path, + m or "", + response.stdout, + response.stderr, + response.rc, + ) + ) raise LinkError(message) else: - log.warn("%s script failed for package %s\n" - "consider notifying the package maintainer", action, prec.dist_str()) + log.warn( + "%s script failed for package %s\n" + "consider notifying the package maintainer", + action, + prec.dist_str(), + ) return False else: messages(prefix) return True finally: if script_caller is not None: - if 'CONDA_TEST_SAVE_TEMPS' not in os.environ: + if "CONDA_TEST_SAVE_TEMPS" not in os.environ: rm_rf(script_caller) else: - log.warning('CONDA_TEST_SAVE_TEMPS :: retaining run_script {}'.format( - script_caller)) + log.warning( + 
"CONDA_TEST_SAVE_TEMPS :: retaining run_script {}".format( + script_caller + ) + ) def messages(prefix): - path = join(prefix, '.messages.txt') + path = join(prefix, ".messages.txt") try: if isfile(path): with open(path) as fi: m = fi.read() if hasattr(m, "decode"): - m = m.decode('utf-8') + m = m.decode("utf-8") print(m, file=sys.stderr if context.json else sys.stdout) return m finally: diff --git a/conda/core/package_cache.py b/conda/core/package_cache.py index aa2dda7451c..e7c33ed062c 100644 --- a/conda/core/package_cache.py +++ b/conda/core/package_cache.py @@ -1,6 +1,6 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - # for conda-build from .package_cache_data import ProgressiveFetchExtract + ProgressiveFetchExtract = ProgressiveFetchExtract diff --git a/conda/core/package_cache_data.py b/conda/core/package_cache_data.py index e10d5177552..c6b224eed23 100644 --- a/conda/core/package_cache_data.py +++ b/conda/core/package_cache_data.py @@ -4,10 +4,10 @@ import codecs import os - from collections import defaultdict -from concurrent.futures import as_completed, ThreadPoolExecutor, Future +from concurrent.futures import Future, ThreadPoolExecutor, as_completed from errno import EACCES, ENOENT, EPERM, EROFS +from functools import partial from itertools import chain from json import JSONDecodeError from logging import getLogger @@ -15,18 +15,16 @@ from os.path import basename, dirname, getsize, join from sys import platform from tarfile import ReadError -from functools import partial from conda.common.iterators import groupby_to_dict as groupby -from .path_actions import CacheUrlAction, ExtractPackageAction from .. import CondaError, CondaMultiError, conda_signal_handler from ..auxlib.collection import first from ..auxlib.decorators import memoizemethod from ..base.constants import ( - CONDA_PACKAGE_EXTENSIONS, CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2, + CONDA_PACKAGE_EXTENSIONS, PACKAGE_CACHE_MAGIC_FILE, ) from ..base.context import context @@ -35,7 +33,7 @@ from ..common.path import expand, strip_pkg_extension, url_to_path from ..common.signals import signal_handler from ..common.url import path_to_url -from ..exceptions import NoWritablePkgsDirError, NotWritableError +from ..exceptions import NotWritableError, NoWritablePkgsDirError from ..gateways.disk.create import ( create_package_cache_directory, extract_tarball, @@ -55,6 +53,7 @@ from ..models.match_spec import MatchSpec from ..models.records import PackageCacheRecord, PackageRecord from ..utils import human_bytes +from .path_actions import CacheUrlAction, ExtractPackageAction log = getLogger(__name__) @@ -95,8 +94,9 @@ def __init__(self, pkgs_dir): self._urls_data = UrlsData(pkgs_dir) def insert(self, package_cache_record): - - meta = join(package_cache_record.extracted_package_dir, "info", "repodata_record.json") + meta = join( + package_cache_record.extracted_package_dir, "info", "repodata_record.json" + ) write_as_json_to_file(meta, PackageRecord.from_objects(package_cache_record)) self._package_cache_records[package_cache_record] = package_cache_record @@ -150,10 +150,18 @@ def query(self, package_ref_or_match_spec): if isinstance(param, str): param = MatchSpec(param) if isinstance(param, MatchSpec): - return (pcrec for pcrec in self._package_cache_records.values() if param.match(pcrec)) + return ( + pcrec + for pcrec in self._package_cache_records.values() + if param.match(pcrec) + ) else: assert isinstance(param, PackageRecord) - return (pcrec for pcrec in 
self._package_cache_records.values() if pcrec == param) + return ( + pcrec + for pcrec in self._package_cache_records.values() + if pcrec == param + ) def iter_records(self): return iter(self._package_cache_records) @@ -196,7 +204,9 @@ def first_writable(cls, pkgs_dirs=None): def writable_caches(cls, pkgs_dirs=None): if pkgs_dirs is None: pkgs_dirs = context.pkgs_dirs - writable_caches = tuple(filter(lambda c: c.is_writable, (cls(pd) for pd in pkgs_dirs))) + writable_caches = tuple( + filter(lambda c: c.is_writable, (cls(pd) for pd in pkgs_dirs)) + ) return writable_caches @classmethod @@ -250,11 +260,15 @@ def get_entry_to_link(cls, package_ref): ) if pc_entry is not None: return pc_entry - raise CondaError("No package '%s' found in cache directories." % package_ref.dist_str()) + raise CondaError( + "No package '%s' found in cache directories." % package_ref.dist_str() + ) @classmethod def tarball_file_in_cache(cls, tarball_path, md5sum=None, exclude_caches=()): - tarball_full_path, md5sum = cls._clean_tarball_path_and_get_md5sum(tarball_path, md5sum) + tarball_full_path, md5sum = cls._clean_tarball_path_and_get_md5sum( + tarball_path, md5sum + ) pc_entry = first( cls(pkgs_dir).tarball_file_in_this_cache(tarball_full_path, md5sum) for pkgs_dir in context.pkgs_dirs @@ -267,11 +281,14 @@ def clear(cls): cls._cache_.clear() def tarball_file_in_this_cache(self, tarball_path, md5sum=None): - tarball_full_path, md5sum = self._clean_tarball_path_and_get_md5sum(tarball_path, md5sum) + tarball_full_path, md5sum = self._clean_tarball_path_and_get_md5sum( + tarball_path, md5sum + ) tarball_basename = basename(tarball_full_path) pc_entry = first( (pc_entry for pc_entry in self.values()), - key=lambda pce: pce.tarball_basename == tarball_basename and pce.md5 == md5sum, + key=lambda pce: pce.tarball_basename == tarball_basename + and pce.md5 == md5sum, ) return pc_entry @@ -373,7 +390,9 @@ def _make_single_record(self, package_filename): e, ) - if isdir(extracted_package_dir) and not isfile(package_tarball_full_path): + if isdir(extracted_package_dir) and not isfile( + package_tarball_full_path + ): # We have a directory that looks like a conda package, but without # (1) info/repodata_record.json or info/index.json, and (2) a conda package # tarball, there's not much we can do. We'll just ignore it. @@ -386,7 +405,9 @@ def _make_single_record(self, package_filename): # to do is remove it and try extracting. rm_rf(extracted_package_dir) try: - extract_tarball(package_tarball_full_path, extracted_package_dir) + extract_tarball( + package_tarball_full_path, extracted_package_dir + ) except (OSError, InvalidArchiveError) as e: if e.errno == ENOENT: # FileNotFoundError(2, 'No such file or directory') @@ -405,8 +426,15 @@ def _make_single_record(self, package_filename): rm_rf(extracted_package_dir) return None else: - raw_json_record = read_index_json_from_tarball(package_tarball_full_path) - except (EOFError, ReadError, FileNotFoundError, InvalidArchiveError) as e: + raw_json_record = read_index_json_from_tarball( + package_tarball_full_path + ) + except ( + EOFError, + ReadError, + FileNotFoundError, + InvalidArchiveError, + ) as e: # EOFError: Compressed file ended before the end-of-stream marker was reached # tarfile.ReadError: file could not be opened successfully # We have a corrupted tarball. 
Remove the tarball so it doesn't affect @@ -439,12 +467,18 @@ def _make_single_record(self, package_filename): # write the info/repodata_record.json file so we can short-circuit this next time if self.is_writable: repodata_record = PackageRecord.from_objects(package_cache_record) - repodata_record_path = join(extracted_package_dir, "info", "repodata_record.json") + repodata_record_path = join( + extracted_package_dir, "info", "repodata_record.json" + ) try: write_as_json_to_file(repodata_record_path, repodata_record) except OSError as e: - if e.errno in (EACCES, EPERM, EROFS) and isdir(dirname(repodata_record_path)): - raise NotWritableError(repodata_record_path, e.errno, caused_by=e) + if e.errno in (EACCES, EPERM, EROFS) and isdir( + dirname(repodata_record_path) + ): + raise NotWritableError( + repodata_record_path, e.errno, caused_by=e + ) else: raise @@ -562,7 +596,11 @@ def pcrec_matches(pcrec): ), None, ) - if extracted_pcrec and pcrec_matches(extracted_pcrec) and extracted_pcrec.get("url"): + if ( + extracted_pcrec + and pcrec_matches(extracted_pcrec) + and extracted_pcrec.get("url") + ): return None, None # there is no extracted dist that can work, so now we look for tarballs that @@ -574,7 +612,8 @@ def pcrec_matches(pcrec): ( pcrec for pcrec in chain.from_iterable( - pcache.query(pref_or_spec) for pcache in PackageCacheData.writable_caches() + pcache.query(pref_or_spec) + for pcache in PackageCacheData.writable_caches() ) if pcrec.is_fetched ), @@ -588,8 +627,12 @@ def pcrec_matches(pcrec): # extract in place extract_action = ExtractPackageAction( source_full_path=pcrec_from_writable_cache.package_tarball_full_path, - target_pkgs_dir=dirname(pcrec_from_writable_cache.package_tarball_full_path), - target_extracted_dirname=basename(pcrec_from_writable_cache.extracted_package_dir), + target_pkgs_dir=dirname( + pcrec_from_writable_cache.package_tarball_full_path + ), + target_extracted_dirname=basename( + pcrec_from_writable_cache.extracted_package_dir + ), record_or_spec=pcrec_from_writable_cache, sha256=pcrec_from_writable_cache.sha256 or sha256, size=pcrec_from_writable_cache.size or size, @@ -601,7 +644,8 @@ def pcrec_matches(pcrec): ( pcrec for pcrec in chain.from_iterable( - pcache.query(pref_or_spec) for pcache in PackageCacheData.read_only_caches() + pcache.query(pref_or_spec) + for pcache in PackageCacheData.read_only_caches() ) if pcrec.is_fetched ), @@ -621,7 +665,9 @@ def pcrec_matches(pcrec): size=pcrec_from_read_only_cache.get("size") or size, md5=pcrec_from_read_only_cache.get("md5") or md5, ) - trgt_extracted_dirname = strip_pkg_extension(pcrec_from_read_only_cache.fn)[0] + trgt_extracted_dirname = strip_pkg_extension(pcrec_from_read_only_cache.fn)[ + 0 + ] extract_action = ExtractPackageAction( source_full_path=cache_action.target_full_path, target_pkgs_dir=first_writable_cache.pkgs_dir, @@ -673,7 +719,9 @@ def __init__(self, link_prefs): "\n ".join(pkg_rec.dist_str() for pkg_rec in link_prefs), ) - self.paired_actions = {} # Map[pref, Tuple(CacheUrlAction, ExtractPackageAction)] + self.paired_actions = ( + {} + ) # Map[pref, Tuple(CacheUrlAction, ExtractPackageAction)] self._prepared = False self._executed = False @@ -730,11 +778,15 @@ def execute(self): with signal_handler(conda_signal_handler), time_recorder( "fetch_extract_execute" - ), ThreadPoolExecutor(context.fetch_threads) as fetch_executor, ThreadPoolExecutor( + ), ThreadPoolExecutor( + context.fetch_threads + ) as fetch_executor, ThreadPoolExecutor( EXTRACT_THREADS ) as extract_executor: - - for 
prec_or_spec, (cache_action, extract_action) in self.paired_actions.items(): + for prec_or_spec, ( + cache_action, + extract_action, + ) in self.paired_actions.items(): if cache_action is None and extract_action is None: # Not sure when this is reached. continue diff --git a/conda/core/path_actions.py b/conda/core/path_actions.py index 88de7308348..833a1f894be 100644 --- a/conda/core/path_actions.py +++ b/conda/core/path_actions.py @@ -1,35 +1,49 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import re +import sys from abc import ABCMeta, abstractmethod, abstractproperty from itertools import chain from json import JSONDecodeError from logging import getLogger from os.path import basename, dirname, getsize, isdir, join -import re -import sys from uuid import uuid4 -from .envs_manager import get_user_environments_txt_file, register_env, unregister_env -from .portability import _PaddingError, update_prefix -from .prefix_data import PrefixData from .. import CondaError from ..auxlib.ish import dals from ..base.constants import CONDA_TEMP_EXTENSION from ..base.context import context from ..common.compat import on_win -from ..common.path import (get_bin_directory_short_path, get_leaf_directories, - get_python_noarch_target_path, get_python_short_path, - parse_entry_point_def, - pyc_path, url_to_path, win_path_ok) +from ..common.path import ( + get_bin_directory_short_path, + get_leaf_directories, + get_python_noarch_target_path, + get_python_short_path, + parse_entry_point_def, + pyc_path, + url_to_path, + win_path_ok, +) from ..common.url import has_platform, path_to_url -from ..exceptions import (CondaUpgradeError, CondaVerificationError, NotWritableError, - PaddingError, SafetyError) +from ..exceptions import ( + CondaUpgradeError, + CondaVerificationError, + NotWritableError, + PaddingError, + SafetyError, +) from ..gateways.connection.download import download -from ..gateways.disk.create import (compile_multiple_pyc, copy, - create_hard_link_or_copy, create_link, - create_python_entry_point, extract_tarball, - make_menu, mkdir_p, write_as_json_to_file) +from ..gateways.disk.create import ( + compile_multiple_pyc, + copy, + create_hard_link_or_copy, + create_link, + create_python_entry_point, + extract_tarball, + make_menu, + mkdir_p, + write_as_json_to_file, +) from ..gateways.disk.delete import rm_rf from ..gateways.disk.permissions import make_writable from ..gateways.disk.read import compute_sum, islink, lexists, read_index_json @@ -38,8 +52,17 @@ from ..models.channel import Channel from ..models.enums import LinkType, NoarchType, PathType from ..models.match_spec import MatchSpec -from ..models.records import (Link, PackageCacheRecord, PackageRecord, PathDataV1, PathsData, - PrefixRecord) +from ..models.records import ( + Link, + PackageCacheRecord, + PackageRecord, + PathDataV1, + PathsData, + PrefixRecord, +) +from .envs_manager import get_user_environments_txt_file, register_env, unregister_env +from .portability import _PaddingError, update_prefix +from .prefix_data import PrefixData try: FileNotFoundError @@ -49,13 +72,13 @@ log = getLogger(__name__) REPR_IGNORE_KWARGS = ( - 'transaction_context', - 'package_info', - 'hold_path', + "transaction_context", + "package_info", + "hold_path", ) -class PathAction(metaclass=ABCMeta): +class PathAction(metaclass=ABCMeta): _verified = False @abstractmethod @@ -95,7 +118,6 @@ def __repr__(self): class MultiPathAction(metaclass=ABCMeta): - _verified = False @abstractmethod @@ -190,24 +212,24 @@ def 
source_full_path(self): class LinkPathAction(CreateInPrefixPathAction): - @classmethod - def create_file_link_actions(cls, transaction_context, package_info, target_prefix, - requested_link_type): + def create_file_link_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): def get_prefix_replace(source_path_data): if source_path_data.path_type == PathType.softlink: link_type = LinkType.copy - prefix_placehoder, file_mode = '', None + prefix_placehoder, file_mode = "", None elif source_path_data.prefix_placeholder: link_type = LinkType.copy prefix_placehoder = source_path_data.prefix_placeholder file_mode = source_path_data.file_mode elif source_path_data.no_link: link_type = LinkType.copy - prefix_placehoder, file_mode = '', None + prefix_placehoder, file_mode = "", None else: link_type = requested_link_type - prefix_placehoder, file_mode = '', None + prefix_placehoder, file_mode = "", None return link_type, prefix_placehoder, file_mode @@ -220,51 +242,95 @@ def make_file_link_action(source_path_data): if noarch is not None: noarch = noarch.type if noarch == NoarchType.python: - sp_dir = transaction_context['target_site_packages_short_path'] + sp_dir = transaction_context["target_site_packages_short_path"] if sp_dir is None: - raise CondaError("Unable to determine python site-packages " - "dir in target_prefix!\nPlease make sure " - "python is installed in %s" % target_prefix) - target_short_path = get_python_noarch_target_path(source_path_data.path, sp_dir) + raise CondaError( + "Unable to determine python site-packages " + "dir in target_prefix!\nPlease make sure " + "python is installed in %s" % target_prefix + ) + target_short_path = get_python_noarch_target_path( + source_path_data.path, sp_dir + ) elif noarch is None or noarch == NoarchType.generic: target_short_path = source_path_data.path else: - raise CondaUpgradeError(dals(""" + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to install this package. 
- Please update conda.""")) + Please update conda.""" + ) + ) link_type, placeholder, fmode = get_prefix_replace(source_path_data) if placeholder: - return PrefixReplaceLinkAction(transaction_context, package_info, - package_info.extracted_package_dir, - source_path_data.path, - target_prefix, target_short_path, - requested_link_type, - placeholder, fmode, source_path_data) + return PrefixReplaceLinkAction( + transaction_context, + package_info, + package_info.extracted_package_dir, + source_path_data.path, + target_prefix, + target_short_path, + requested_link_type, + placeholder, + fmode, + source_path_data, + ) else: - return LinkPathAction(transaction_context, package_info, - package_info.extracted_package_dir, source_path_data.path, - target_prefix, target_short_path, - link_type, source_path_data) - return tuple(make_file_link_action(spi) for spi in package_info.paths_data.paths) + return LinkPathAction( + transaction_context, + package_info, + package_info.extracted_package_dir, + source_path_data.path, + target_prefix, + target_short_path, + link_type, + source_path_data, + ) + + return tuple( + make_file_link_action(spi) for spi in package_info.paths_data.paths + ) @classmethod - def create_directory_actions(cls, transaction_context, package_info, target_prefix, - requested_link_type, file_link_actions): - leaf_directories = get_leaf_directories(axn.target_short_path for axn in file_link_actions) + def create_directory_actions( + cls, + transaction_context, + package_info, + target_prefix, + requested_link_type, + file_link_actions, + ): + leaf_directories = get_leaf_directories( + axn.target_short_path for axn in file_link_actions + ) return tuple( - cls(transaction_context, package_info, None, None, - target_prefix, directory_short_path, LinkType.directory, None) + cls( + transaction_context, + package_info, + None, + None, + target_prefix, + directory_short_path, + LinkType.directory, + None, + ) for directory_short_path in leaf_directories ) @classmethod - def create_python_entry_point_windows_exe_action(cls, transaction_context, package_info, - target_prefix, requested_link_type, - entry_point_def): + def create_python_entry_point_windows_exe_action( + cls, + transaction_context, + package_info, + target_prefix, + requested_link_type, + entry_point_def, + ): source_directory = context.conda_prefix - source_short_path = 'Scripts/conda.exe' + source_short_path = "Scripts/conda.exe" command, _, _ = parse_entry_point_def(entry_point_def) target_short_path = "Scripts/%s.exe" % command source_path_data = PathDataV1( @@ -340,7 +406,10 @@ def verify(self): self.source_path_data, path_type=source_path_type or PathType.softlink, ) - elif self.link_type == LinkType.copy and source_path_data.path_type == PathType.softlink: + elif ( + self.link_type == LinkType.copy + and source_path_data.path_type == PathType.softlink + ): self.prefix_path_data = PathDataV1.from_objects( self.source_path_data, path_type=source_path_type or PathType.softlink, @@ -418,8 +487,12 @@ def verify(self): def execute(self): log.trace("linking %s => %s", self.source_full_path, self.target_full_path) - create_link(self.source_full_path, self.target_full_path, self.link_type, - force=context.force) + create_link( + self.source_full_path, + self.target_full_path, + self.link_type, + force=context.force, + ) self._execute_successful = True def reverse(self): @@ -466,13 +539,16 @@ def verify(self): if islink(self.source_full_path): log.trace( - "ignoring prefix update for symlink with source path %s", 
self.source_full_path + "ignoring prefix update for symlink with source path %s", + self.source_full_path, ) # return assert False, "I don't think this is the right place to ignore this" mkdir_p(self.transaction_context["temp_dir"]) - self.intermediate_path = join(self.transaction_context["temp_dir"], str(uuid4())) + self.intermediate_path = join( + self.transaction_context["temp_dir"], str(uuid4()) + ) log.trace("copying %s => %s", self.source_full_path, self.intermediate_path) create_link(self.source_full_path, self.intermediate_path, LinkType.copy) @@ -480,13 +556,19 @@ def verify(self): try: log.trace("rewriting prefixes in %s", self.target_full_path) - update_prefix(self.intermediate_path, - context.target_prefix_override or self.target_prefix, - self.prefix_placeholder, - self.file_mode, subdir=self.package_info.repodata_record.subdir) + update_prefix( + self.intermediate_path, + context.target_prefix_override or self.target_prefix, + self.prefix_placeholder, + self.file_mode, + subdir=self.package_info.repodata_record.subdir, + ) except _PaddingError: - raise PaddingError(self.target_full_path, self.prefix_placeholder, - len(self.prefix_placeholder)) + raise PaddingError( + self.target_full_path, + self.prefix_placeholder, + len(self.prefix_placeholder), + ) sha256_in_prefix = compute_sum(self.intermediate_path, "sha256") @@ -510,9 +592,10 @@ def execute(self): class MakeMenuAction(CreateInPrefixPathAction): - @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): + def create_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): if on_win and context.shortcuts: MENU_RE = re.compile(r"^menu/.*\.json$", re.IGNORECASE) return tuple( @@ -523,9 +606,16 @@ def create_actions(cls, transaction_context, package_info, target_prefix, reques else: return () - def __init__(self, transaction_context, package_info, target_prefix, target_short_path): + def __init__( + self, transaction_context, package_info, target_prefix, target_short_path + ): super().__init__( - transaction_context, package_info, None, None, target_prefix, target_short_path + transaction_context, + package_info, + None, + None, + target_prefix, + target_short_path, ) self._execute_successful = False @@ -542,14 +632,18 @@ def reverse(self): class CreateNonadminAction(CreateInPrefixPathAction): @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): + def create_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): if on_win and lexists(join(context.root_prefix, ".nonadmin")): return (cls(transaction_context, package_info, target_prefix),) else: return () def __init__(self, transaction_context, package_info, target_prefix): - super().__init__(transaction_context, package_info, None, None, target_prefix, ".nonadmin") + super().__init__( + transaction_context, package_info, None, None, target_prefix, ".nonadmin" + ) self._file_created = False def execute(self): @@ -583,7 +677,15 @@ def create_actions( and noarch_py_file_re.match(axn.source_short_path) ) pyc_files = tuple(pyc_path(pf, py_ver) for pf in py_files) - return (cls(transaction_context, package_info, target_prefix, py_files, pyc_files),) + return ( + cls( + transaction_context, + package_info, + target_prefix, + py_files, + pyc_files, + ), + ) else: return () @@ -602,7 +704,12 @@ def __init__( self.target_short_paths = target_short_paths self.prefix_path_data = None self.prefix_paths_data = 
[ - PathDataV1(_path=p, path_type=PathType.pyc_file,) for p in self.target_short_paths] + PathDataV1( + _path=p, + path_type=PathType.pyc_file, + ) + for p in self.target_short_paths + ] self._execute_successful = False @property @@ -612,6 +719,7 @@ def join_or_none(prefix, short_path): return None else: return join(prefix, win_path_ok(short_path)) + return (join_or_none(self.target_prefix, p) for p in self.target_short_paths) @property @@ -621,6 +729,7 @@ def join_or_none(prefix, short_path): return None else: return join(prefix, win_path_ok(short_path)) + return (join_or_none(self.target_prefix, p) for p in self.source_short_paths) def verify(self): @@ -635,18 +744,23 @@ def execute(self): # installed into a python 2 environment, but no code paths actually importing it # technically then, this file should be removed from the manifest in conda-meta, but # at the time of this writing that's not currently happening - log.trace("compiling %s", ' '.join(self.target_full_paths)) - target_python_version = self.transaction_context['target_python_version'] + log.trace("compiling %s", " ".join(self.target_full_paths)) + target_python_version = self.transaction_context["target_python_version"] python_short_path = get_python_short_path(target_python_version) python_full_path = join(self.target_prefix, win_path_ok(python_short_path)) - compile_multiple_pyc(python_full_path, self.source_full_paths, self.target_full_paths, - self.target_prefix, self.transaction_context['target_python_version']) + compile_multiple_pyc( + python_full_path, + self.source_full_paths, + self.target_full_paths, + self.target_prefix, + self.transaction_context["target_python_version"], + ) self._execute_successful = True def reverse(self): # this removes all pyc files even if they were not created if self._execute_successful: - log.trace("reversing pyc creation %s", ' '.join(self.target_full_paths)) + log.trace("reversing pyc creation %s", " ".join(self.target_full_paths)) for target_full_path in self.target_full_paths: rm_rf(target_full_path) @@ -677,7 +791,9 @@ def __init__(self, *individuals, **kw): class CreatePythonEntryPointAction(CreateInPrefixPathAction): @classmethod - def create_actions(cls, transaction_context, package_info, target_prefix, requested_link_type): + def create_actions( + cls, transaction_context, package_info, target_prefix, requested_link_type + ): noarch = package_info.package_metadata and package_info.package_metadata.noarch if noarch is not None and noarch.type == NoarchType.python: @@ -689,7 +805,12 @@ def this_triplet(entry_point_def): return target_short_path, module, func actions = tuple( - cls(transaction_context, package_info, target_prefix, *this_triplet(ep_def)) + cls( + transaction_context, + package_info, + target_prefix, + *this_triplet(ep_def), + ) for ep_def in noarch.entry_points or () ) @@ -710,10 +831,21 @@ def this_triplet(entry_point_def): return () def __init__( - self, transaction_context, package_info, target_prefix, target_short_path, module, func + self, + transaction_context, + package_info, + target_prefix, + target_short_path, + module, + func, ): super().__init__( - transaction_context, package_info, None, None, target_prefix, target_short_path + transaction_context, + package_info, + None, + None, + target_prefix, + target_short_path, ) self.module = module self.func = func @@ -734,14 +866,16 @@ def execute(self): if on_win: python_full_path = None else: - target_python_version = self.transaction_context['target_python_version'] + target_python_version = 
self.transaction_context["target_python_version"] python_short_path = get_python_short_path(target_python_version) python_full_path = join( context.target_prefix_override or self.target_prefix, - win_path_ok(python_short_path)) + win_path_ok(python_short_path), + ) - create_python_entry_point(self.target_full_path, python_full_path, - self.module, self.func) + create_python_entry_point( + self.target_full_path, python_full_path, self.module, self.func + ) self._execute_successful = True def reverse(self): @@ -929,7 +1063,6 @@ def create_actions( requested_spec, all_link_path_actions, ): - extracted_package_dir = package_info.extracted_package_dir target_short_path = "conda-meta/%s.json" % basename(extracted_package_dir) return ( @@ -955,7 +1088,12 @@ def __init__( all_link_path_actions, ): super().__init__( - transaction_context, package_info, None, None, target_prefix, target_short_path + transaction_context, + package_info, + None, + None, + target_prefix, + target_short_path, ) self.requested_link_type = requested_link_type self.requested_spec = requested_spec @@ -974,10 +1112,15 @@ def files_from_action(link_path_action): if isinstance(link_path_action, CompileMultiPycAction): return link_path_action.target_short_paths else: - return ((link_path_action.target_short_path, ) - if isinstance(link_path_action, CreateInPrefixPathAction) and - (not hasattr(link_path_action, 'link_type') or - link_path_action.link_type != LinkType.directory) else ()) + return ( + (link_path_action.target_short_path,) + if isinstance(link_path_action, CreateInPrefixPathAction) + and ( + not hasattr(link_path_action, "link_type") + or link_path_action.link_type != LinkType.directory + ) + else () + ) def paths_from_action(link_path_action): if isinstance(link_path_action, CompileMultiPycAction): @@ -992,7 +1135,9 @@ def paths_from_action(link_path_action): return (link_path_action.prefix_path_data,) files = list( - chain.from_iterable(files_from_action(x) for x in self.all_link_path_actions if x) + chain.from_iterable( + files_from_action(x) for x in self.all_link_path_actions if x + ) ) paths_data = PathsData( paths_version=1, @@ -1021,13 +1166,20 @@ def paths_from_action(link_path_action): def reverse(self): log.trace("reversing linked package record creation %s", self.target_full_path) if self._execute_successful: - PrefixData(self.target_prefix).remove(self.package_info.repodata_record.name) + PrefixData(self.target_prefix).remove( + self.package_info.repodata_record.name + ) class UpdateHistoryAction(CreateInPrefixPathAction): @classmethod def create_actions( - cls, transaction_context, target_prefix, remove_specs, update_specs, neutered_specs + cls, + transaction_context, + target_prefix, + remove_specs, + update_specs, + neutered_specs, ): target_short_path = join("conda-meta", "history") return ( @@ -1050,7 +1202,9 @@ def __init__( update_specs, neutered_specs, ): - super().__init__(transaction_context, None, None, None, target_prefix, target_short_path) + super().__init__( + transaction_context, None, None, None, target_prefix, target_short_path + ) self.remove_specs = remove_specs self.update_specs = update_specs self.neutered_specs = neutered_specs @@ -1077,7 +1231,6 @@ def cleanup(self): class RegisterEnvironmentLocationAction(PathAction): - def __init__(self, transaction_context, target_prefix): self.transaction_context = transaction_context self.target_prefix = target_prefix @@ -1090,8 +1243,11 @@ def verify(self): touch(user_environments_txt_file, mkdir=True, sudo_safe=True) self._verified = True 
except NotWritableError: - log.warn("Unable to create environments file. Path not writable.\n" - " environment location: %s\n", user_environments_txt_file) + log.warn( + "Unable to create environments file. Path not writable.\n" + " environment location: %s\n", + user_environments_txt_file, + ) def execute(self): log.trace("registering environment in catalog %s", self.target_prefix) @@ -1116,7 +1272,9 @@ def target_full_path(self): class RemoveFromPrefixPathAction(PrefixPathAction, metaclass=ABCMeta): - def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path): + def __init__( + self, transaction_context, linked_package_data, target_prefix, target_short_path + ): super().__init__(transaction_context, target_prefix, target_short_path) self.linked_package_data = linked_package_data @@ -1144,12 +1302,18 @@ def __init__( def execute(self): if self.link_type != LinkType.directory: - log.trace("renaming %s => %s", self.target_short_path, self.holding_short_path) + log.trace( + "renaming %s => %s", self.target_short_path, self.holding_short_path + ) backoff_rename(self.target_full_path, self.holding_full_path, force=True) def reverse(self): if self.link_type != LinkType.directory and lexists(self.holding_full_path): - log.trace("reversing rename %s => %s", self.holding_short_path, self.target_short_path) + log.trace( + "reversing rename %s => %s", + self.holding_short_path, + self.target_short_path, + ) backoff_rename(self.holding_full_path, self.target_full_path, force=True) def cleanup(self): @@ -1158,7 +1322,6 @@ def cleanup(self): class RemoveMenuAction(RemoveFromPrefixPathAction): - @classmethod def create_actions(cls, transaction_context, linked_package_data, target_prefix): if on_win: @@ -1171,7 +1334,9 @@ def create_actions(cls, transaction_context, linked_package_data, target_prefix) else: return () - def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path): + def __init__( + self, transaction_context, linked_package_data, target_prefix, target_short_path + ): super().__init__( transaction_context, linked_package_data, target_prefix, target_short_path ) @@ -1189,7 +1354,9 @@ def cleanup(self): class RemoveLinkedPackageRecordAction(UnlinkPathAction): - def __init__(self, transaction_context, linked_package_data, target_prefix, target_short_path): + def __init__( + self, transaction_context, linked_package_data, target_prefix, target_short_path + ): super().__init__( transaction_context, linked_package_data, target_prefix, target_short_path ) @@ -1234,10 +1401,17 @@ def target_full_path(self): # Fetch / Extract Actions # ###################################################### -class CacheUrlAction(PathAction): - def __init__(self, url, target_pkgs_dir, target_package_basename, - sha256=None, size=None, md5=None): +class CacheUrlAction(PathAction): + def __init__( + self, + url, + target_pkgs_dir, + target_package_basename, + sha256=None, + size=None, + md5=None, + ): self.url = url self.target_pkgs_dir = target_pkgs_dir self.target_package_basename = target_package_basename @@ -1247,13 +1421,14 @@ def __init__(self, url, target_pkgs_dir, target_package_basename, self.hold_path = self.target_full_path + CONDA_TEMP_EXTENSION def verify(self): - assert '::' not in self.url + assert "::" not in self.url self._verified = True def execute(self, progress_update_callback=None): # I hate inline imports, but I guess it's ok since we're importing from the conda.core # The alternative is passing the PackageCache class to 
CacheUrlAction __init__ from .package_cache_data import PackageCacheData + target_package_cache = PackageCacheData(self.target_pkgs_dir) log.trace("caching url %s => %s", self.url, self.target_full_path) @@ -1262,20 +1437,27 @@ def execute(self, progress_update_callback=None): rm_rf(self.hold_path) if lexists(self.target_full_path): - if self.url.startswith('file:/') and self.url == path_to_url(self.target_full_path): + if self.url.startswith("file:/") and self.url == path_to_url( + self.target_full_path + ): # the source and destination are the same file, so we're done return else: backoff_rename(self.target_full_path, self.hold_path, force=True) - if self.url.startswith('file:/'): + if self.url.startswith("file:/"): source_path = url_to_path(self.url) - self._execute_local(source_path, target_package_cache, progress_update_callback) + self._execute_local( + source_path, target_package_cache, progress_update_callback + ) else: self._execute_channel(target_package_cache, progress_update_callback) - def _execute_local(self, source_path, target_package_cache, progress_update_callback=None): + def _execute_local( + self, source_path, target_package_cache, progress_update_callback=None + ): from .package_cache_data import PackageCacheData + if dirname(source_path) in context.pkgs_dirs: # if url points to another package cache, link to the writable cache create_hard_link_or_copy(source_path, self.target_full_path) @@ -1284,7 +1466,9 @@ def _execute_local(self, source_path, target_package_cache, progress_update_call # the package is already in a cache, so it came from a remote url somewhere; # make sure that remote url is the most recent url in the # writable cache urls.txt - origin_url = source_package_cache._urls_data.get_url(self.target_package_basename) + origin_url = source_package_cache._urls_data.get_url( + self.target_package_basename + ) if origin_url and has_platform(origin_url, context.known_subdirs): target_package_cache._urls_data.add_url(origin_url) else: @@ -1300,7 +1484,7 @@ def _execute_local(self, source_path, target_package_cache, progress_update_call # directory that might exist in this cache because we are going to overwrite it # anyway when we extract the tarball. 
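A note on the `_execute_local` branch in the hunk above: when the source tarball already lives in another package cache, conda links it into the writable cache rather than copying. A minimal sketch of the link-with-copy-fallback technique; `hard_link_or_copy` here is a hypothetical helper for illustration, not conda's `create_hard_link_or_copy`:

```python
import os
import shutil


def hard_link_or_copy(src: str, dst: str) -> None:
    # A hard link is near-free and shares disk blocks with the other
    # cache, but it fails across filesystems (EXDEV) and on some mounts;
    # a metadata-preserving copy is the fallback in that case.
    try:
        os.link(src, dst)
    except OSError:
        shutil.copy2(src, dst)
```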
source_md5sum = compute_sum(source_path, "md5") - exclude_caches = self.target_pkgs_dir, + exclude_caches = (self.target_pkgs_dir,) pc_entry = PackageCacheData.tarball_file_in_cache( source_path, source_md5sum, exclude_caches=exclude_caches ) @@ -1313,8 +1497,12 @@ def _execute_local(self, source_path, target_package_cache, progress_update_call origin_url = None # copy the tarball to the writable cache - create_link(source_path, self.target_full_path, link_type=LinkType.copy, - force=context.force) + create_link( + source_path, + self.target_full_path, + link_type=LinkType.copy, + force=context.force, + ) if origin_url and has_platform(origin_url, context.known_subdirs): target_package_cache._urls_data.add_url(origin_url) @@ -1333,7 +1521,7 @@ def _execute_channel(self, target_package_cache, progress_update_callback=None): self.url, self.target_full_path, progress_update_callback=progress_update_callback, - **kwargs + **kwargs, ) target_package_cache._urls_data.add_url(self.url) @@ -1409,7 +1597,11 @@ def execute(self, progress_update_callback=None): if isinstance(self.record_or_spec, MatchSpec): url = self.record_or_spec.get_raw_value("url") assert url - channel = Channel(url) if has_platform(url, context.known_subdirs) else Channel(None) + channel = ( + Channel(url) + if has_platform(url, context.known_subdirs) + else Channel(None) + ) fn = basename(url) sha256 = self.sha256 or compute_sum(self.source_full_path, "sha256") size = getsize(self.source_full_path) @@ -1417,12 +1609,22 @@ def execute(self, progress_update_callback=None): assert size == self.size, (size, self.size) md5 = self.md5 or compute_sum(self.source_full_path, "md5") repodata_record = PackageRecord.from_objects( - raw_index_json, url=url, channel=channel, fn=fn, sha256=sha256, size=size, md5=md5, + raw_index_json, + url=url, + channel=channel, + fn=fn, + sha256=sha256, + size=size, + md5=md5, ) else: - repodata_record = PackageRecord.from_objects(self.record_or_spec, raw_index_json) + repodata_record = PackageRecord.from_objects( + self.record_or_spec, raw_index_json + ) - repodata_record_path = join(self.target_full_path, 'info', 'repodata_record.json') + repodata_record_path = join( + self.target_full_path, "info", "repodata_record.json" + ) write_as_json_to_file(repodata_record_path, repodata_record) target_package_cache = PackageCacheData(self.target_pkgs_dir) @@ -1448,5 +1650,9 @@ def target_full_path(self): return join(self.target_pkgs_dir, self.target_extracted_dirname) def __str__(self): - return ('ExtractPackageAction<source_full_path=%s, target_full_path=%s>' - % (self.source_full_path, self.target_full_path)) + return ( + "ExtractPackageAction<source_full_path={}, target_full_path={}>".format( + self.source_full_path, + self.target_full_path, + ) + ) diff --git a/conda/core/portability.py b/conda/core/portability.py index 71bfed74f74..69793adb60c 100644 --- a/conda/core/portability.py +++ b/conda/core/portability.py @@ -2,19 +2,19 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from logging import getLogger -from os.path import realpath, basename import os import re import struct import subprocess import sys +from logging import getLogger +from os.path import basename, realpath from ..auxlib.ish import dals from ..base.constants import PREFIX_PLACEHOLDER from ..base.context import context -from ..common.compat import on_win, on_linux -from ..exceptions import CondaIOError, BinaryPrefixReplacementError +from ..common.compat import on_linux, on_win +from ..exceptions import BinaryPrefixReplacementError, CondaIOError from ..gateways.disk.update import
CancelOperation, update_file_in_place_as_binary from ..models.enums import FileMode @@ -22,11 +22,13 @@ # three capture groups: whole_shebang, executable, options -SHEBANG_REGEX = (br'^(#!' # pretty much the whole match string - br'(?:[ ]*)' # allow spaces between #! and beginning of the executable path - br'(/(?:\\ |[^ \n\r\t])*)' # the executable is the next text block without an escaped space or non-space whitespace character # NOQA - br'(.*)' # the rest of the line can contain option flags - br')$') # end whole_shebang group +SHEBANG_REGEX = ( + rb"^(#!" # pretty much the whole match string + rb"(?:[ ]*)" # allow spaces between #! and beginning of the executable path + rb"(/(?:\\ |[^ \n\r\t])*)" # the executable is the next text block without an escaped space or non-space whitespace character # NOQA + rb"(.*)" # the rest of the line can contain option flags + rb")$" +) # end whole_shebang group MAX_SHEBANG_LENGTH = 127 if on_linux else 512 # Not used on Windows @@ -59,7 +61,6 @@ def update_prefix( new_prefix = new_prefix.replace("\\", "/") def _update_prefix(original_data): - # Step 1. do all prefix replacement data = replace_prefix(mode, original_data, placeholder, new_prefix) @@ -75,19 +76,29 @@ def _update_prefix(original_data): # Step 4. if we have a binary file, make sure the byte size is the same before # and after the update if mode == FileMode.binary and len(data) != len(original_data): - raise BinaryPrefixReplacementError(path, placeholder, new_prefix, - len(original_data), len(data)) + raise BinaryPrefixReplacementError( + path, placeholder, new_prefix, len(original_data), len(data) + ) return data updated = update_file_in_place_as_binary(realpath(path), _update_prefix) - if updated and mode == FileMode.binary and subdir == "osx-arm64" and sys.platform == "darwin": + if ( + updated + and mode == FileMode.binary + and subdir == "osx-arm64" + and sys.platform == "darwin" + ): # Apple arm64 needs signed executables - subprocess.run(['/usr/bin/codesign', '-s', '-', '-f', realpath(path)], capture_output=True) + subprocess.run( + ["/usr/bin/codesign", "-s", "-", "-f", realpath(path)], capture_output=True + ) -def replace_prefix(mode: FileMode, data: bytes, placeholder: str, new_prefix: str) -> bytes: +def replace_prefix( + mode: FileMode, data: bytes, placeholder: str, new_prefix: str +) -> bytes: """ Replaces `placeholder` text with the `new_prefix` provided. The `mode` provided can either be text or binary. 
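For context on the hunks that follow: in text mode the placeholder prefix can simply be substituted, but in binary mode the file's byte length must not change (which is what `BinaryPrefixReplacementError` above enforces). A minimal sketch of length-preserving replacement, assuming null-terminated C strings; this is an illustration of the technique, not conda's actual `binary_replace`:

```python
import re


def binary_prefix_replace(data: bytes, old: bytes, new: bytes) -> bytes:
    # Sketch of the core idea only: rewrite each null-terminated string
    # that starts with the old prefix, then pad with NULs so the blob
    # keeps its exact length and later file offsets stay valid.
    assert len(new) <= len(old), "the new prefix must not be longer"

    def pad(match):
        replaced = match.group().replace(old, new)
        return replaced + b"\0" * (len(match.group()) - len(replaced))

    return re.sub(re.escape(old) + rb"[^\0]*\0", pad, data)
```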
@@ -110,14 +121,23 @@ def replace_prefix(mode: FileMode, data: bytes, placeholder: str, new_prefix: st shebang_line, rest_of_data = data[:newline_pos], data[newline_pos:] shebang_placeholder = f"#!{placeholder}".encode(encoding) if shebang_placeholder in shebang_line: - escaped_shebang = f"#!{new_prefix}".replace(" ", "\\ ").encode(encoding) - shebang_line = shebang_line.replace(shebang_placeholder, escaped_shebang) + escaped_shebang = f"#!{new_prefix}".replace(" ", "\\ ").encode( + encoding + ) + shebang_line = shebang_line.replace( + shebang_placeholder, escaped_shebang + ) data = shebang_line + rest_of_data # the rest of the file can be replaced normally - data = data.replace(placeholder.encode(encoding), new_prefix.encode(encoding)) + data = data.replace( + placeholder.encode(encoding), new_prefix.encode(encoding) + ) elif mode == FileMode.binary: data = binary_replace( - data, placeholder.encode(encoding), new_prefix.encode(encoding), encoding=encoding + data, + placeholder.encode(encoding), + new_prefix.encode(encoding), + encoding=encoding, ) else: raise CondaIOError("Invalid mode: %r" % mode) @@ -166,8 +186,9 @@ def replace(match): return data + def has_pyzzer_entry_point(data): - pos = data.rfind(b'PK\x05\x06') + pos = data.rfind(b"PK\x05\x06") return pos >= 0 @@ -197,24 +218,24 @@ def replace_pyzzer_entry_point_shebang(all_data, placeholder, new_prefix): # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. launcher = shebang = None - pos = all_data.rfind(b'PK\x05\x06') + pos = all_data.rfind(b"PK\x05\x06") if pos >= 0: - end_cdr = all_data[pos + 12:pos + 20] - cdr_size, cdr_offset = struct.unpack('<LL', end_cdr) + end_cdr = all_data[pos + 12 : pos + 20] + cdr_size, cdr_offset = struct.unpack("<LL", end_cdr) arc_pos = pos - cdr_size - cdr_offset data = all_data[arc_pos:] if arc_pos > 0: - pos = all_data.rfind(b'#!', 0, arc_pos) + pos = all_data.rfind(b"#!", 0, arc_pos) if pos >= 0: shebang = all_data[pos:arc_pos] if pos > 0: launcher = all_data[:pos] if data and shebang and launcher: - if hasattr(placeholder, 'encode'): - placeholder = placeholder.encode('utf-8') - if hasattr(new_prefix, 'encode'): - new_prefix = new_prefix.encode('utf-8') + if hasattr(placeholder, "encode"): + placeholder = placeholder.encode("utf-8") + if hasattr(new_prefix, "encode"): + new_prefix = new_prefix.encode("utf-8") shebang = shebang.replace(placeholder, new_prefix) all_data = b"".join([launcher, shebang, data]) return all_data @@ -234,7 +255,9 @@ def replace_long_shebang(mode, data): whole_shebang, executable, options = shebang_match.groups() prefix, executable_name = executable.decode("utf-8").rsplit("/", 1) if len(whole_shebang) > MAX_SHEBANG_LENGTH or "\\ " in prefix: - new_shebang = f"#!/usr/bin/env {executable_name}{options.decode('utf-8')}" + new_shebang = ( + f"#!/usr/bin/env {executable_name}{options.decode('utf-8')}" + ) data = data.replace(whole_shebang, new_shebang.encode("utf-8")) else: diff --git a/conda/core/prefix_data.py b/conda/core/prefix_data.py index 49008aec187..015b08756ef 100644 --- a/conda/core/prefix_data.py +++ b/conda/core/prefix_data.py @@ -1,14 +1,18 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import json -from logging import getLogger import os -from os.path import basename, isdir, isfile, join, lexists import re +from logging import getLogger +from os.path import basename, isdir, isfile, join, lexists -from ..base.constants import PREFIX_STATE_FILE from ..auxlib.exceptions import ValidationError -from ..base.constants import CONDA_PACKAGE_EXTENSIONS, PREFIX_MAGIC_FILE, CONDA_ENV_VARS_UNSET_VAR +from ..base.constants import ( + CONDA_ENV_VARS_UNSET_VAR, + CONDA_PACKAGE_EXTENSIONS, +
PREFIX_MAGIC_FILE, + PREFIX_STATE_FILE, +) from ..base.context import context from ..common.constants import NULL from ..common.io import time_recorder @@ -16,7 +20,10 @@ from ..common.pkg_formats.python import get_site_packages_anchor_files from ..common.serialize import json_load from ..exceptions import ( - BasicClobberError, CondaDependencyError, CorruptedEnvironmentError, maybe_raise, + BasicClobberError, + CondaDependencyError, + CorruptedEnvironmentError, + maybe_raise, ) from ..gateways.disk.create import write_as_json_to_file from ..gateways.disk.delete import rm_rf @@ -52,18 +59,20 @@ def __init__(self, prefix_path, pip_interop_enabled=None): self.prefix_path = prefix_path self.__prefix_records = None self.__is_writable = NULL - self._pip_interop_enabled = (pip_interop_enabled - if pip_interop_enabled is not None - else context.pip_interop_enabled) + self._pip_interop_enabled = ( + pip_interop_enabled + if pip_interop_enabled is not None + else context.pip_interop_enabled + ) @time_recorder(module_name=__name__) def load(self): self.__prefix_records = {} - _conda_meta_dir = join(self.prefix_path, 'conda-meta') + _conda_meta_dir = join(self.prefix_path, "conda-meta") if lexists(_conda_meta_dir): conda_meta_json_paths = ( - p for p in - (entry.path for entry in os.scandir(_conda_meta_dir)) + p + for p in (entry.path for entry in os.scandir(_conda_meta_dir)) if p[-5:] == ".json" ) for meta_file in conda_meta_json_paths: @@ -79,28 +88,35 @@ def _get_json_fn(self, prefix_record): fn = prefix_record.fn known_ext = False # .dist-info is for things installed by pip - for ext in CONDA_PACKAGE_EXTENSIONS + ('.dist-info',): + for ext in CONDA_PACKAGE_EXTENSIONS + (".dist-info",): if fn.endswith(ext): - fn = fn.replace(ext, '') + fn = fn.replace(ext, "") known_ext = True if not known_ext: - raise ValueError("Attempted to make prefix record for unknown package type: %s" % fn) - return fn + '.json' + raise ValueError( + "Attempted to make prefix record for unknown package type: %s" % fn + ) + return fn + ".json" def insert(self, prefix_record): - assert prefix_record.name not in self._prefix_records, \ - "Prefix record insertion error: a record with name %s already exists " \ - "in the prefix. This is a bug in conda. Please report it at " \ + assert prefix_record.name not in self._prefix_records, ( + "Prefix record insertion error: a record with name %s already exists " + "in the prefix. This is a bug in conda. 
Please report it at " "https://github.com/conda/conda/issues" % prefix_record.name + ) - prefix_record_json_path = join(self.prefix_path, 'conda-meta', - self._get_json_fn(prefix_record)) + prefix_record_json_path = join( + self.prefix_path, "conda-meta", self._get_json_fn(prefix_record) + ) if lexists(prefix_record_json_path): - maybe_raise(BasicClobberError( - source_path=None, - target_path=prefix_record_json_path, - context=context, - ), context) + maybe_raise( + BasicClobberError( + source_path=None, + target_path=prefix_record_json_path, + context=context, + ), + context, + ) rm_rf(prefix_record_json_path) write_as_json_to_file(prefix_record_json_path, prefix_record) @@ -112,9 +128,12 @@ def remove(self, package_name): prefix_record = self._prefix_records[package_name] - prefix_record_json_path = join(self.prefix_path, 'conda-meta', - self._get_json_fn(prefix_record)) - conda_meta_full_path = join(self.prefix_path, 'conda-meta', prefix_record_json_path) + prefix_record_json_path = join( + self.prefix_path, "conda-meta", self._get_json_fn(prefix_record) + ) + conda_meta_full_path = join( + self.prefix_path, "conda-meta", prefix_record_json_path + ) if self.is_writable: rm_rf(conda_meta_full_path) @@ -151,11 +170,16 @@ def query(self, package_ref_or_match_spec): if isinstance(param, str): param = MatchSpec(param) if isinstance(param, MatchSpec): - return (prefix_rec for prefix_rec in self.iter_records() - if param.match(prefix_rec)) + return ( + prefix_rec + for prefix_rec in self.iter_records() + if param.match(prefix_rec) + ) else: assert isinstance(param, PackageRecord) - return (prefix_rec for prefix_rec in self.iter_records() if prefix_rec == param) + return ( + prefix_rec for prefix_rec in self.iter_records() if prefix_rec == param + ) @property def _prefix_records(self): @@ -169,7 +193,9 @@ def _load_single_record(self, prefix_record_json_path): except (UnicodeDecodeError, json.JSONDecodeError): # UnicodeDecodeError: catch horribly corrupt files # JSONDecodeError: catch bad json format files - raise CorruptedEnvironmentError(self.prefix_path, prefix_record_json_path) + raise CorruptedEnvironmentError( + self.prefix_path, prefix_record_json_path + ) # TODO: consider, at least in memory, storing prefix_record_json_path as part # of PrefixRecord @@ -178,11 +204,17 @@ def _load_single_record(self, prefix_record_json_path): # check that prefix record json filename conforms to name-version-build # apparently implemented as part of #2638 to resolve #2599 try: - n, v, b = basename(prefix_record_json_path)[:-5].rsplit('-', 2) - if (n, v, b) != (prefix_record.name, prefix_record.version, prefix_record.build): + n, v, b = basename(prefix_record_json_path)[:-5].rsplit("-", 2) + if (n, v, b) != ( + prefix_record.name, + prefix_record.version, + prefix_record.build, + ): raise ValueError() except ValueError: - log.warn("Ignoring malformed prefix record at: %s", prefix_record_json_path) + log.warn( + "Ignoring malformed prefix record at: %s", prefix_record_json_path + ) # TODO: consider just deleting here this record file in the future return @@ -201,15 +233,18 @@ def is_writable(self): # # REMOVE: ? 
def _has_python(self): - return 'python' in self._prefix_records + return "python" in self._prefix_records @property def _python_pkg_record(self): """Return the prefix record for the package python.""" return next( - (prefix_record for prefix_record in self.__prefix_records.values() - if prefix_record.name == 'python'), - None + ( + prefix_record + for prefix_record in self.__prefix_records.values() + if prefix_record.name == "python" + ), + None, ) def _load_site_packages(self): @@ -230,7 +265,9 @@ def _load_site_packages(self): if not python_pkg_record: return {} - site_packages_dir = get_python_site_packages_short_path(python_pkg_record.version) + site_packages_dir = get_python_site_packages_short_path( + python_pkg_record.version + ) site_packages_path = join(self.prefix_path, win_path_ok(site_packages_dir)) if not isdir(site_packages_path): @@ -245,7 +282,9 @@ def _load_site_packages(self): # Get all anchor files and compare against conda anchor files to find clobbered conda # packages and python packages installed via other means (not handled by conda) - sp_anchor_files = get_site_packages_anchor_files(site_packages_path, site_packages_dir) + sp_anchor_files = get_site_packages_anchor_files( + site_packages_path, site_packages_dir + ) conda_anchor_files = set(conda_python_packages) clobbered_conda_anchor_files = conda_anchor_files - sp_anchor_files non_conda_anchor_files = sp_anchor_files - conda_anchor_files @@ -255,20 +294,24 @@ def _load_site_packages(self): # the in-memory record for the conda package. In the future, we should consider # also deleting the record on disk in the conda-meta/ directory. for conda_anchor_file in clobbered_conda_anchor_files: - prefix_rec = self._prefix_records.pop(conda_python_packages[conda_anchor_file].name) + prefix_rec = self._prefix_records.pop( + conda_python_packages[conda_anchor_file].name + ) try: extracted_package_dir = basename(prefix_rec.extracted_package_dir) except AttributeError: - extracted_package_dir = "-".join(( - prefix_rec.name, prefix_rec.version, prefix_rec.build - )) + extracted_package_dir = "-".join( + (prefix_rec.name, prefix_rec.version, prefix_rec.build) + ) prefix_rec_json_path = join( - self.prefix_path, "conda-meta", '%s.json' % extracted_package_dir + self.prefix_path, "conda-meta", "%s.json" % extracted_package_dir ) try: rm_rf(prefix_rec_json_path) except OSError: - log.debug("stale information, but couldn't remove: %s", prefix_rec_json_path) + log.debug( + "stale information, but couldn't remove: %s", prefix_rec_json_path + ) else: log.debug("removed due to stale information: %s", prefix_rec_json_path) @@ -276,18 +319,27 @@ def _load_site_packages(self): new_packages = {} for af in non_conda_anchor_files: try: - python_record = read_python_record(self.prefix_path, af, python_pkg_record.version) + python_record = read_python_record( + self.prefix_path, af, python_pkg_record.version + ) except OSError as e: - log.info("Python record ignored for anchor path '%s'\n due to %s", af, e) + log.info( + "Python record ignored for anchor path '%s'\n due to %s", af, e + ) continue except ValidationError: import sys + exc_type, exc_value, exc_traceback = sys.exc_info() import traceback + tb = traceback.format_exception(exc_type, exc_value, exc_traceback) - log.warn("Problem reading non-conda package record at %s. Please verify that you " - "still need this, and if so, that this is still installed correctly. " - "Reinstalling this package may help.", af) + log.warn( + "Problem reading non-conda package record at %s. 
Please verify that you " + "still need this, and if so, that this is still installed correctly. " + "Reinstalling this package may help.", + af, + ) log.debug("ValidationError: \n%s\n", "\n".join(tb)) continue if not python_record: @@ -308,45 +360,47 @@ def _get_environment_state_file(self): def _write_environment_state_file(self, state): env_vars_file = join(self.prefix_path, PREFIX_STATE_FILE) - with open(env_vars_file, 'w') as f: + with open(env_vars_file, "w") as f: f.write(json.dumps(state, ensure_ascii=False, default=lambda x: x.__dict__)) def get_environment_env_vars(self): prefix_state = self._get_environment_state_file() env_vars_all = dict(prefix_state.get("env_vars", {})) - env_vars = {k: v for k, v in env_vars_all.items() if v != CONDA_ENV_VARS_UNSET_VAR} + env_vars = { + k: v for k, v in env_vars_all.items() if v != CONDA_ENV_VARS_UNSET_VAR + } return env_vars def set_environment_env_vars(self, env_vars): env_state_file = self._get_environment_state_file() - current_env_vars = env_state_file.get('env_vars') + current_env_vars = env_state_file.get("env_vars") if current_env_vars: current_env_vars.update(env_vars) else: - env_state_file['env_vars'] = env_vars + env_state_file["env_vars"] = env_vars self._write_environment_state_file(env_state_file) - return env_state_file.get('env_vars') + return env_state_file.get("env_vars") def unset_environment_env_vars(self, env_vars): env_state_file = self._get_environment_state_file() - current_env_vars = env_state_file.get('env_vars') + current_env_vars = env_state_file.get("env_vars") if current_env_vars: for env_var in env_vars: if env_var in current_env_vars.keys(): current_env_vars[env_var] = CONDA_ENV_VARS_UNSET_VAR self._write_environment_state_file(env_state_file) - return env_state_file.get('env_vars') + return env_state_file.get("env_vars") def get_conda_anchor_files_and_records(site_packages_short_path, python_records): """Return the anchor files for the conda records of python packages.""" - anchor_file_endings = ('.egg-info/PKG-INFO', '.dist-info/RECORD', '.egg-info') + anchor_file_endings = (".egg-info/PKG-INFO", ".dist-info/RECORD", ".egg-info") conda_python_packages = {} matcher = re.compile( r"^{}/[^/]+(?:{})$".format( re.escape(site_packages_short_path), - r"|".join(re.escape(fn) for fn in anchor_file_endings) + r"|".join(re.escape(fn) for fn in anchor_file_endings), ) ).match @@ -354,8 +408,11 @@ def get_conda_anchor_files_and_records(site_packages_short_path, python_records) anchor_paths = tuple(fpath for fpath in prefix_record.files if matcher(fpath)) if len(anchor_paths) > 1: anchor_path = sorted(anchor_paths, key=len)[0] - log.info("Package %s has multiple python anchor files.\n" - " Using %s", prefix_record.record_id(), anchor_path) + log.info( + "Package %s has multiple python anchor files.\n" " Using %s", + prefix_record.record_id(), + anchor_path, + ) conda_python_packages[anchor_path] = prefix_record elif anchor_paths: conda_python_packages[anchor_paths[0]] = prefix_record @@ -365,13 +422,17 @@ def get_conda_anchor_files_and_records(site_packages_short_path, python_records) def get_python_version_for_prefix(prefix): # returns a string e.g. 
"2.7", "3.4", "3.5" or None - py_record_iter = (rcrd for rcrd in PrefixData(prefix).iter_records() if rcrd.name == 'python') + py_record_iter = ( + rcrd for rcrd in PrefixData(prefix).iter_records() if rcrd.name == "python" + ) record = next(py_record_iter, None) if record is None: return None next_record = next(py_record_iter, None) if next_record is not None: - raise CondaDependencyError("multiple python records found in prefix %s" % prefix) + raise CondaDependencyError( + "multiple python records found in prefix %s" % prefix + ) elif record.version[3].isdigit(): return record.version[:4] else: @@ -379,10 +440,15 @@ def get_python_version_for_prefix(prefix): def delete_prefix_from_linked_data(path): - '''Here, path may be a complete prefix or a dist inside a prefix''' - linked_data_path = next((key for key in sorted(PrefixData._cache_, reverse=True) - if path.startswith(key)), - None) + """Here, path may be a complete prefix or a dist inside a prefix""" + linked_data_path = next( + ( + key + for key in sorted(PrefixData._cache_, reverse=True) + if path.startswith(key) + ), + None, + ) if linked_data_path: del PrefixData._cache_[linked_data_path] return True diff --git a/conda/core/solve.py b/conda/core/solve.py index e9bb4addc35..dfeccf53254 100644 --- a/conda/core/solve.py +++ b/conda/core/solve.py @@ -1,14 +1,14 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import copy -from genericpath import exists +import sys from itertools import chain from logging import DEBUG, getLogger from os.path import join -import sys from textwrap import dedent +from genericpath import exists + try: from boltons.setutils import IndexedSet except ImportError: # pragma: no cover @@ -16,21 +16,21 @@ from conda.common.iterators import groupby_to_dict as groupby -from .index import get_reduced_index, _supplement_index_with_system -from .link import PrefixSetup, UnlinkLinkTransaction -from .prefix_data import PrefixData -from .subdir_data import SubdirData -from .. import CondaError, __version__ as CONDA_VERSION -from ..deprecations import deprecated +from .. import CondaError +from .. 
import __version__ as CONDA_VERSION from ..auxlib.decorators import memoizedproperty from ..auxlib.ish import dals -from ..base.constants import DepsModifier, UNKNOWN_CHANNEL, UpdateModifier, REPODATA_FN +from ..base.constants import REPODATA_FN, UNKNOWN_CHANNEL, DepsModifier, UpdateModifier from ..base.context import context from ..common.constants import NULL from ..common.io import Spinner, dashlist, time_recorder from ..common.path import get_major_minor_version, paths_equal -from ..exceptions import (PackagesNotFoundError, SpecsConfigurationConflictError, - UnsatisfiableError) +from ..deprecations import deprecated +from ..exceptions import ( + PackagesNotFoundError, + SpecsConfigurationConflictError, + UnsatisfiableError, +) from ..history import History from ..models.channel import Channel from ..models.enums import NoarchType @@ -38,6 +38,10 @@ from ..models.prefix_graph import PrefixGraph from ..models.version import VersionOrder from ..resolve import Resolve +from .index import _supplement_index_with_system, get_reduced_index +from .link import PrefixSetup, UnlinkLinkTransaction +from .prefix_data import PrefixData +from .subdir_data import SubdirData log = getLogger(__name__) @@ -67,8 +71,16 @@ class Solver: """ - def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remove=(), - repodata_fn=REPODATA_FN, command=NULL): + def __init__( + self, + prefix, + channels, + subdirs=(), + specs_to_add=(), + specs_to_remove=(), + repodata_fn=REPODATA_FN, + command=NULL, + ): """ Args: prefix (str): @@ -101,9 +113,16 @@ def __init__(self, prefix, channels, subdirs=(), specs_to_add=(), specs_to_remov self._prepared = False self._pool_cache = {} - def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=NULL, - should_retry_solve=False): + def solve_for_transaction( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=NULL, + should_retry_solve=False, + ): """Gives an UnlinkLinkTransaction instance that can be used to execute the solution on an environment. @@ -131,21 +150,39 @@ def solve_for_transaction(self, update_modifier=NULL, deps_modifier=NULL, prune= # the integration level in the PrivateEnvIntegrationTests in test_create.py. raise NotImplementedError() else: - unlink_precs, link_precs = self.solve_for_diff(update_modifier, deps_modifier, - prune, ignore_pinned, - force_remove, force_reinstall, - should_retry_solve) - stp = PrefixSetup(self.prefix, unlink_precs, link_precs, - self.specs_to_remove, self.specs_to_add, self.neutered_specs) + unlink_precs, link_precs = self.solve_for_diff( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + force_reinstall, + should_retry_solve, + ) + stp = PrefixSetup( + self.prefix, + unlink_precs, + link_precs, + self.specs_to_remove, + self.specs_to_add, + self.neutered_specs, + ) # TODO: Only explicitly requested remove and update specs are being included in # History right now. Do we need to include other categories from the solve? 
self._notify_conda_outdated(link_precs) return UnlinkLinkTransaction(stp) - def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, force_reinstall=NULL, - should_retry_solve=False): + def solve_for_diff( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + force_reinstall=NULL, + should_retry_solve=False, + ): """Gives the package references to remove from an environment, followed by the package references to add to an environment. @@ -174,22 +211,39 @@ def solve_for_diff(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, dependency order from roots to leaves. """ - final_precs = self.solve_final_state(update_modifier, deps_modifier, prune, ignore_pinned, - force_remove, should_retry_solve) + final_precs = self.solve_final_state( + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + should_retry_solve, + ) unlink_precs, link_precs = diff_for_unlink_link_precs( self.prefix, final_precs, self.specs_to_add, force_reinstall ) # assert that all unlink_precs are manageable - unmanageable = groupby(lambda prec: prec.is_unmanageable, unlink_precs).get(True) + unmanageable = groupby(lambda prec: prec.is_unmanageable, unlink_precs).get( + True + ) if unmanageable: - raise RuntimeError("Cannot unlink unmanageable packages:%s" - % dashlist(prec.record_id() for prec in unmanageable)) + raise RuntimeError( + "Cannot unlink unmanageable packages:%s" + % dashlist(prec.record_id() for prec in unmanageable) + ) return unlink_precs, link_precs - def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL, - ignore_pinned=NULL, force_remove=NULL, should_retry_solve=False): + def solve_final_state( + self, + update_modifier=NULL, + deps_modifier=NULL, + prune=NULL, + ignore_pinned=NULL, + force_remove=NULL, + should_retry_solve=False, + ): """Gives the final, solved state of the environment. 
Args: @@ -234,19 +288,32 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL deps_modifier = context.deps_modifier else: deps_modifier = DepsModifier(str(deps_modifier).lower()) - ignore_pinned = context.ignore_pinned if ignore_pinned is NULL else ignore_pinned + ignore_pinned = ( + context.ignore_pinned if ignore_pinned is NULL else ignore_pinned + ) force_remove = context.force_remove if force_remove is NULL else force_remove - log.debug("solving prefix %s\n" - " specs_to_remove: %s\n" - " specs_to_add: %s\n" - " prune: %s", self.prefix, self.specs_to_remove, self.specs_to_add, prune) + log.debug( + "solving prefix %s\n" + " specs_to_remove: %s\n" + " specs_to_add: %s\n" + " prune: %s", + self.prefix, + self.specs_to_remove, + self.specs_to_add, + prune, + ) - retrying = hasattr(self, 'ssc') + retrying = hasattr(self, "ssc") if not retrying: ssc = SolverStateContainer( - self.prefix, update_modifier, deps_modifier, prune, ignore_pinned, force_remove, + self.prefix, + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, should_retry_solve, ) self.ssc = ssc @@ -260,13 +327,19 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL if self.specs_to_remove and force_remove: if self.specs_to_add: raise NotImplementedError() - solution = tuple(prec for prec in ssc.solution_precs - if not any(spec.match(prec) for spec in self.specs_to_remove)) + solution = tuple( + prec + for prec in ssc.solution_precs + if not any(spec.match(prec) for spec in self.specs_to_remove) + ) return IndexedSet(PrefixGraph(solution).graph) # Check if specs are satisfied by current environment. If they are, exit early. - if (update_modifier == UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE - and not self.specs_to_remove and not prune): + if ( + update_modifier == UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE + and not self.specs_to_remove + and not prune + ): for spec in self.specs_to_add: if not next(ssc.prefix_data.query(spec), None): break @@ -276,71 +349,97 @@ def solve_final_state(self, update_modifier=NULL, deps_modifier=NULL, prune=NULL return IndexedSet(PrefixGraph(ssc.solution_precs).graph) if not ssc.r: - with Spinner("Collecting package metadata (%s)" % self._repodata_fn, - (not context.verbosity and not context.quiet and not retrying), - context.json): + with Spinner( + "Collecting package metadata (%s)" % self._repodata_fn, + (not context.verbosity and not context.quiet and not retrying), + context.json, + ): ssc = self._collect_all_metadata(ssc) if should_retry_solve and update_modifier == UpdateModifier.FREEZE_INSTALLED: - fail_message = "failed with initial frozen solve. Retrying with flexible solve.\n" + fail_message = ( + "failed with initial frozen solve. 
Retrying with flexible solve.\n" + ) elif self._repodata_fn != REPODATA_FN: - fail_message = ("failed with repodata from %s, will retry with next repodata" - " source.\n" % self._repodata_fn) + fail_message = ( + "failed with repodata from %s, will retry with next repodata" + " source.\n" % self._repodata_fn + ) else: fail_message = "failed\n" - with Spinner("Solving environment", not context.verbosity and not context.quiet, - context.json, fail_message=fail_message): + with Spinner( + "Solving environment", + not context.verbosity and not context.quiet, + context.json, + fail_message=fail_message, + ): ssc = self._remove_specs(ssc) ssc = self._add_specs(ssc) solution_precs = copy.copy(ssc.solution_precs) - pre_packages = self.get_request_package_in_solution(ssc.solution_precs, ssc.specs_map) + pre_packages = self.get_request_package_in_solution( + ssc.solution_precs, ssc.specs_map + ) ssc = self._find_inconsistent_packages(ssc) # this will prune precs that are deps of precs that get removed due to conflicts ssc = self._run_sat(ssc) - post_packages = self.get_request_package_in_solution(ssc.solution_precs, ssc.specs_map) + post_packages = self.get_request_package_in_solution( + ssc.solution_precs, ssc.specs_map + ) if ssc.update_modifier == UpdateModifier.UPDATE_SPECS: constrained = self.get_constrained_packages( - pre_packages, post_packages, ssc.index.keys()) + pre_packages, post_packages, ssc.index.keys() + ) if len(constrained) > 0: for spec in constrained: self.determine_constricting_specs(spec, ssc.solution_precs) # if there were any conflicts, we need to add their orphaned deps back in if ssc.add_back_map: - orphan_precs = (set(solution_precs) - - set(ssc.solution_precs) - - set(ssc.add_back_map)) + orphan_precs = ( + set(solution_precs) + - set(ssc.solution_precs) + - set(ssc.add_back_map) + ) solution_prec_names = [_.name for _ in ssc.solution_precs] ssc.solution_precs.extend( - [_ for _ in orphan_precs - if _.name not in ssc.specs_map and _.name not in solution_prec_names]) + [ + _ + for _ in orphan_precs + if _.name not in ssc.specs_map + and _.name not in solution_prec_names + ] + ) ssc = self._post_sat_handling(ssc) time_recorder.log_totals() ssc.solution_precs = IndexedSet(PrefixGraph(ssc.solution_precs).graph) - log.debug("solved prefix %s\n" - " solved_linked_dists:\n" - " %s\n", - self.prefix, "\n ".join(prec.dist_str() for prec in ssc.solution_precs)) + log.debug( + "solved prefix %s\n" " solved_linked_dists:\n" " %s\n", + self.prefix, + "\n ".join(prec.dist_str() for prec in ssc.solution_precs), + ) return ssc.solution_precs def determine_constricting_specs(self, spec, solution_precs): - highest_version = [VersionOrder(sp.version) for sp in solution_precs - if sp.name == spec.name][0] + highest_version = [ + VersionOrder(sp.version) for sp in solution_precs if sp.name == spec.name + ][0] constricting = [] for prec in solution_precs: if any(j for j in prec.depends if spec.name in j): for dep in prec.depends: m_dep = MatchSpec(dep) - if m_dep.name == spec.name and \ - m_dep.version is not None and \ - (m_dep.version.exact_value or "<" in m_dep.version.spec): + if ( + m_dep.name == spec.name + and m_dep.version is not None + and (m_dep.version.exact_value or "<" in m_dep.version.spec) + ): if "," in m_dep.version.spec: constricting.extend( [ @@ -352,17 +451,24 @@ def determine_constricting_specs(self, spec, solution_precs): else: constricting.append((prec.name, m_dep)) - hard_constricting = [i for i in constricting if i[1].version.matcher_vo <= highest_version] + 
hard_constricting = [ + i for i in constricting if i[1].version.matcher_vo <= highest_version + ] if len(hard_constricting) == 0: return None print(f"\n\nUpdating {spec.name} is constricted by \n") for const in hard_constricting: - print("{package} -> requires {conflict_dep}".format( - package=const[0], conflict_dep=const[1])) - print("\nIf you are sure you want an update of your package either try " - "`conda update --all` or install a specific version of the " - "package you want using `conda install <pkg>=<version>`\n") + print( + "{package} -> requires {conflict_dep}".format( + package=const[0], conflict_dep=const[1] + ) + ) + print( + "\nIf you are sure you want an update of your package either try " + "`conda update --all` or install a specific version of the " + "package you want using `conda install <pkg>=<version>`\n" + ) return hard_constricting def get_request_package_in_solution(self, solution_precs, specs_map): @@ -371,12 +477,17 @@ def get_request_package_in_solution(self, solution_precs, specs_map): update_pkg_request = pkg.name requested_packages[update_pkg_request] = [ - (i.name, str(i.version)) for i in solution_precs + (i.name, str(i.version)) + for i in solution_precs if i.name == update_pkg_request and i.version is not None ] requested_packages[update_pkg_request].extend( - [(v.name, str(v.version)) for k, v in specs_map.items() - if k == update_pkg_request and v.version is not None]) + [ + (v.name, str(v.version)) + for k, v in specs_map.items() + if k == update_pkg_request and v.version is not None + ] + ) return requested_packages @@ -393,10 +504,12 @@ def empty_package_list(pkg): return update_constrained for pkg in self.specs_to_add: - if pkg.name.startswith('__'): # ignore virtual packages + if pkg.name.startswith("__"): # ignore virtual packages continue current_version = max(i[1] for i in pre_packages[pkg.name]) - if current_version == max(i.version for i in index_keys if i.name == pkg.name): + if current_version == max( + i.version for i in index_keys if i.name == pkg.name + ): continue else: if post_packages == pre_packages: @@ -411,8 +524,14 @@ def _collect_all_metadata(self, ssc): # these are things that we want to keep even if they're not explicitly specified. This # is to compensate for older installers not recording these appropriately for them # to be preserved. - for pkg_name in ('anaconda', 'conda', 'conda-build', 'python.app', - 'console_shortcut', 'powershell_shortcut'): + for pkg_name in ( + "anaconda", + "conda", + "conda-build", + "python.app", + "console_shortcut", + "powershell_shortcut", + ): if pkg_name not in ssc.specs_map and ssc.prefix_data.get(pkg_name, None): ssc.specs_map[pkg_name] = MatchSpec(pkg_name) @@ -420,7 +539,7 @@ def _collect_all_metadata(self, ssc): virtual_pkg_index = {} _supplement_index_with_system(virtual_pkg_index) virtual_pkgs = [p.name for p in virtual_pkg_index.keys()] - for virtual_pkgs_name in (virtual_pkgs): + for virtual_pkgs_name in virtual_pkgs: if virtual_pkgs_name not in ssc.specs_map: ssc.specs_map[virtual_pkgs_name] = MatchSpec(virtual_pkgs_name) @@ -435,9 +554,11 @@ def _collect_all_metadata(self, ssc): # declaration that it is manually installed, much like the # history map. It may still be replaced if it is in conflict, # but it is not just an indirect dep that can be pruned.
- if (not ssc.specs_from_history_map - or MatchSpec(prec.name) in context.aggressive_update_packages - or prec.subdir == 'pypi'): + if ( + not ssc.specs_from_history_map + or MatchSpec(prec.name) in context.aggressive_update_packages + or prec.subdir == "pypi" + ): ssc.specs_map.update({prec.name: MatchSpec(prec.name)}) prepared_specs = { @@ -457,7 +578,9 @@ def _remove_specs(self, ssc): # SAT for spec removal determination, we can use the PrefixGraph and simple tree # traversal if we're careful about how we handle features. We still invoke sat via # `r.solve()` later. - _track_fts_specs = (spec for spec in self.specs_to_remove if "track_features" in spec) + _track_fts_specs = ( + spec for spec in self.specs_to_remove if "track_features" in spec + ) feature_names = set( chain.from_iterable( spec.get_raw_value("track_features") for spec in _track_fts_specs @@ -480,7 +603,8 @@ def _remove_specs(self, ssc): # ensure that each spec in specs_to_remove is actually associated with removed records unmatched_specs_to_remove = tuple( - spec for spec in no_removed_records_specs + spec + for spec in no_removed_records_specs if not any(spec.match(rec) for rec in all_removed_records) ) if unmatched_specs_to_remove: @@ -494,7 +618,7 @@ def _remove_specs(self, ssc): rec_has_a_feature = set(rec.features or ()) & feature_names if rec_has_a_feature and rec.name in ssc.specs_from_history_map: spec = ssc.specs_map.get(rec.name, MatchSpec(rec.name)) - spec._match_components.pop('features', None) + spec._match_components.pop("features", None) ssc.specs_map[spec.name] = spec else: ssc.specs_map.pop(rec.name, None) @@ -517,12 +641,19 @@ def _find_inconsistent_packages(self, ssc): ssc.solution_precs = tuple(ssc.index.get(k, k) for k in ssc.solution_precs) _, inconsistent_precs = ssc.r.bad_installed(ssc.solution_precs, ()) if log.isEnabledFor(DEBUG): - log.debug("inconsistent precs: %s", - dashlist(inconsistent_precs) if inconsistent_precs else 'None') + log.debug( + "inconsistent precs: %s", + dashlist(inconsistent_precs) if inconsistent_precs else "None", + ) if inconsistent_precs: - print(dedent(""" + print( + dedent( + """ The environment is inconsistent, please check the package plan carefully - The following packages are causing the inconsistency:"""), file=sys.stderr) + The following packages are causing the inconsistency:""" + ), + file=sys.stderr, + ) print(dashlist(inconsistent_precs), file=sys.stderr) for prec in inconsistent_precs: # pop and save matching spec in specs_map @@ -534,10 +665,11 @@ def _find_inconsistent_packages(self, ssc): # inconsistent environments should maintain the python version # unless explicitly requested by the user. 
This along with the logic in # _add_specs maintains the major.minor version - if prec.name == 'python' and spec: - ssc.specs_map['python'] = spec - ssc.solution_precs = tuple(prec for prec in ssc.solution_precs - if prec not in inconsistent_precs) + if prec.name == "python" and spec: + ssc.specs_map["python"] = spec + ssc.solution_precs = tuple( + prec for prec in ssc.solution_precs if prec not in inconsistent_precs + ) return ssc def _package_has_updates(self, ssc, spec, installed_pool): @@ -550,15 +682,22 @@ def _package_has_updates(self, ssc, spec, installed_pool): if prec.version > installed_prec.version: has_update = True break - elif (prec.version == installed_prec.version and - prec.build_number > installed_prec.build_number): + elif ( + prec.version == installed_prec.version + and prec.build_number > installed_prec.build_number + ): has_update = True break # let conda determine the latest version by just adding a name spec - return (MatchSpec(spec.name, version=prec.version, build_number=prec.build_number) - if has_update else spec) + return ( + MatchSpec(spec.name, version=prec.version, build_number=prec.build_number) + if has_update + else spec + ) - def _should_freeze(self, ssc, target_prec, conflict_specs, explicit_pool, installed_pool): + def _should_freeze( + self, ssc, target_prec, conflict_specs, explicit_pool, installed_pool + ): # never, ever freeze anything if we have no history. if not ssc.specs_from_history_map: return False @@ -568,9 +707,9 @@ def _should_freeze(self, ssc, target_prec, conflict_specs, explicit_pool, instal # if all package specs have overlapping package choices (satisfiable in at least one way) pkg_name = target_prec.name - no_conflict = (pkg_name not in conflict_specs and - (pkg_name not in explicit_pool or - target_prec in explicit_pool[pkg_name])) + no_conflict = pkg_name not in conflict_specs and ( + pkg_name not in explicit_pool or target_prec in explicit_pool[pkg_name] + ) return no_conflict @@ -593,7 +732,9 @@ def _add_specs(self, ssc): conflict_specs = ( ssc.r.get_conflicting_specs( - tuple(record.to_match_spec() for record in ssc.prefix_data.iter_records()), + tuple( + record.to_match_spec() for record in ssc.prefix_data.iter_records() + ), self.specs_to_add, ) or () @@ -601,10 +742,14 @@ def _add_specs(self, ssc): conflict_specs = {_.name for _ in conflict_specs} for pkg_name, spec in ssc.specs_map.items(): - matches_for_spec = tuple(prec for prec in ssc.solution_precs if spec.match(prec)) + matches_for_spec = tuple( + prec for prec in ssc.solution_precs if spec.match(prec) + ) if matches_for_spec: if len(matches_for_spec) != 1: - raise CondaError(dals(""" + raise CondaError( + dals( + """ Conda encountered an error with your environment. Please report an issue at https://github.com/conda/conda/issues. 
In your report, please include the output of 'conda info' and 'conda list' for the active environment, along @@ -612,34 +757,49 @@ def _add_specs(self, ssc): pkg_name: %s spec: %s matches_for_spec: %s - """) % (pkg_name, spec, - dashlist((str(s) for s in matches_for_spec), indent=4))) + """ + ) + % ( + pkg_name, + spec, + dashlist((str(s) for s in matches_for_spec), indent=4), + ) + ) target_prec = matches_for_spec[0] if target_prec.is_unmanageable: ssc.specs_map[pkg_name] = target_prec.to_match_spec() elif MatchSpec(pkg_name) in context.aggressive_update_packages: ssc.specs_map[pkg_name] = MatchSpec(pkg_name) - elif self._should_freeze(ssc, target_prec, conflict_specs, explicit_pool, - installed_pool): + elif self._should_freeze( + ssc, target_prec, conflict_specs, explicit_pool, installed_pool + ): ssc.specs_map[pkg_name] = target_prec.to_match_spec() elif pkg_name in ssc.specs_from_history_map: ssc.specs_map[pkg_name] = MatchSpec( ssc.specs_from_history_map[pkg_name], - target=target_prec.dist_str()) + target=target_prec.dist_str(), + ) else: - ssc.specs_map[pkg_name] = MatchSpec(pkg_name, target=target_prec.dist_str()) + ssc.specs_map[pkg_name] = MatchSpec( + pkg_name, target=target_prec.dist_str() + ) pin_overrides = set() for s in ssc.pinned_specs: if s.name in explicit_pool: if s.name not in self.specs_to_add_names and not ssc.ignore_pinned: ssc.specs_map[s.name] = MatchSpec(s, optional=False) - elif explicit_pool[s.name] & ssc.r._get_package_pool([s]).get(s.name, set()): + elif explicit_pool[s.name] & ssc.r._get_package_pool([s]).get( + s.name, set() + ): ssc.specs_map[s.name] = MatchSpec(s, optional=False) pin_overrides.add(s.name) else: - log.warn("pinned spec %s conflicts with explicit specs. " - "Overriding pinned spec.", s) + log.warn( + "pinned spec %s conflicts with explicit specs. " + "Overriding pinned spec.", + s, + ) # we want to freeze any packages in the env that are not conflicts, so that the # solve goes faster. This is kind of like an iterative solve, except rather @@ -648,13 +808,16 @@ def _add_specs(self, ssc): # optimal output all the time. It would probably also get rid of the need # to retry with an unfrozen (UPDATE_SPECS) solve. if ssc.update_modifier == UpdateModifier.FREEZE_INSTALLED: - precs = [_ for _ in ssc.prefix_data.iter_records() if _.name not in ssc.specs_map] + precs = [ + _ for _ in ssc.prefix_data.iter_records() if _.name not in ssc.specs_map + ] for prec in precs: if prec.name not in conflict_specs: ssc.specs_map[prec.name] = prec.to_match_spec() else: ssc.specs_map[prec.name] = MatchSpec( - prec.name, target=prec.to_match_spec(), optional=True) + prec.name, target=prec.to_match_spec(), optional=True + ) log.debug("specs_map with targets: %s", ssc.specs_map) # If we're in UPDATE_ALL mode, we need to drop all the constraints attached to specs, @@ -675,7 +838,7 @@ def _add_specs(self, ssc): ) for prec in ssc.prefix_data.iter_records(): # treat pip-installed stuff as explicitly installed, too. - if prec.subdir == 'pypi': + if prec.subdir == "pypi": ssc.specs_map.update({prec.name: MatchSpec(prec.name)}) else: ssc.specs_map = { @@ -691,53 +854,67 @@ def _add_specs(self, ssc): # This factors in pins and also ignores specs from the history. 
It is unfreezing only # for the indirect specs that otherwise conflict with update of the immediate request elif ssc.update_modifier == UpdateModifier.UPDATE_SPECS: - skip = lambda x: ((x.name not in pin_overrides and - any(x.name == _.name for _ in ssc.pinned_specs) and - not ssc.ignore_pinned) or - x.name in ssc.specs_from_history_map) + skip = lambda x: ( + ( + x.name not in pin_overrides + and any(x.name == _.name for _ in ssc.pinned_specs) + and not ssc.ignore_pinned + ) + or x.name in ssc.specs_from_history_map + ) - specs_to_add = tuple(self._package_has_updates(ssc, _, installed_pool) - for _ in self.specs_to_add if not skip(_)) + specs_to_add = tuple( + self._package_has_updates(ssc, _, installed_pool) + for _ in self.specs_to_add + if not skip(_) + ) # the index is sorted, so the first record here gives us what we want. - conflicts = ssc.r.get_conflicting_specs(tuple(MatchSpec(_) - for _ in ssc.specs_map.values()), - specs_to_add) + conflicts = ssc.r.get_conflicting_specs( + tuple(MatchSpec(_) for _ in ssc.specs_map.values()), specs_to_add + ) for conflict in conflicts or (): # neuter the spec due to a conflict - if (conflict.name in ssc.specs_map and ( + if ( + conflict.name in ssc.specs_map + and ( # add optional because any pinned specs will include it - MatchSpec(conflict, optional=True) not in ssc.pinned_specs or - ssc.ignore_pinned) and - conflict.name not in ssc.specs_from_history_map): + MatchSpec(conflict, optional=True) not in ssc.pinned_specs + or ssc.ignore_pinned + ) + and conflict.name not in ssc.specs_from_history_map + ): ssc.specs_map[conflict.name] = MatchSpec(conflict.name) # As a business rule, we never want to update python beyond the current minor version, # unless that's requested explicitly by the user (which we actively discourage). - py_in_prefix = any(_.name == 'python' for _ in ssc.solution_precs) - py_requested_explicitly = any(s.name == 'python' for s in self.specs_to_add) + py_in_prefix = any(_.name == "python" for _ in ssc.solution_precs) + py_requested_explicitly = any(s.name == "python" for s in self.specs_to_add) if py_in_prefix and not py_requested_explicitly: - python_prefix_rec = ssc.prefix_data.get('python') + python_prefix_rec = ssc.prefix_data.get("python") freeze_installed = ssc.update_modifier == UpdateModifier.FREEZE_INSTALLED - if 'python' not in conflict_specs and freeze_installed: - ssc.specs_map['python'] = python_prefix_rec.to_match_spec() + if "python" not in conflict_specs and freeze_installed: + ssc.specs_map["python"] = python_prefix_rec.to_match_spec() else: # will our prefix record conflict with any explicit spec? 
If so, don't add # anything here - let python float when it hasn't been explicitly specified - python_spec = ssc.specs_map.get('python', MatchSpec('python')) - if not python_spec.get('version'): - pinned_version = get_major_minor_version(python_prefix_rec.version) + '.*' + python_spec = ssc.specs_map.get("python", MatchSpec("python")) + if not python_spec.get("version"): + pinned_version = ( + get_major_minor_version(python_prefix_rec.version) + ".*" + ) python_spec = MatchSpec(python_spec, version=pinned_version) - spec_set = (python_spec, ) + tuple(self.specs_to_add) + spec_set = (python_spec,) + tuple(self.specs_to_add) if ssc.r.get_conflicting_specs(spec_set, self.specs_to_add): - if self._command != 'install' or ( - self._repodata_fn == REPODATA_FN and - (not ssc.should_retry_solve or not freeze_installed)): + if self._command != "install" or ( + self._repodata_fn == REPODATA_FN + and (not ssc.should_retry_solve or not freeze_installed) + ): # raises a hopefully helpful error message ssc.r.find_conflicts(spec_set) else: raise UnsatisfiableError({}) - ssc.specs_map['python'] = python_spec + ssc.specs_map["python"] = python_spec # For the aggressive_update_packages configuration parameter, we strip any target # that's been set. @@ -748,22 +925,28 @@ def _add_specs(self, ssc): # add in explicitly requested specs from specs_to_add # this overrides any name-matching spec already in the spec map - ssc.specs_map.update((s.name, s) for s in self.specs_to_add if s.name not in pin_overrides) + ssc.specs_map.update( + (s.name, s) for s in self.specs_to_add if s.name not in pin_overrides + ) # As a business rule, we never want to downgrade conda below the current version, # unless that's requested explicitly by the user (which we actively discourage). - if 'conda' in ssc.specs_map and paths_equal(self.prefix, context.conda_prefix): - conda_prefix_rec = ssc.prefix_data.get('conda') + if "conda" in ssc.specs_map and paths_equal(self.prefix, context.conda_prefix): + conda_prefix_rec = ssc.prefix_data.get("conda") if conda_prefix_rec: version_req = ">=%s" % conda_prefix_rec.version - conda_requested_explicitly = any(s.name == 'conda' for s in self.specs_to_add) - conda_spec = ssc.specs_map['conda'] - conda_in_specs_to_add_version = ssc.specs_map.get('conda', {}).get('version') + conda_requested_explicitly = any( + s.name == "conda" for s in self.specs_to_add + ) + conda_spec = ssc.specs_map["conda"] + conda_in_specs_to_add_version = ssc.specs_map.get("conda", {}).get( + "version" + ) if not conda_in_specs_to_add_version: conda_spec = MatchSpec(conda_spec, version=version_req) if context.auto_update_conda and not conda_requested_explicitly: - conda_spec = MatchSpec('conda', version=version_req, target=None) - ssc.specs_map['conda'] = conda_spec + conda_spec = MatchSpec("conda", version=version_req, target=None) + ssc.specs_map["conda"] = conda_spec return ssc @@ -793,13 +976,18 @@ def _run_sat(self, ssc): # may not be the only unsatisfiable subset. We may have to call get_conflicting_specs() # several times, each time making modifications to loosen constraints. 
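The comment above describes an iterative loosening loop. A toy restatement of that control flow, with `find_conflicts` and `loosen` as hypothetical callbacks standing in for `r.get_conflicting_specs()` and the spec-neutering logic in the hunk below:

```python
def loosen_until_consistent(specs, find_conflicts, loosen):
    """Toy version of the retry loop sketched above. `find_conflicts`
    returns the (possibly non-unique) conflicting subset of `specs`,
    and `loosen` returns a weaker replacement spec, or None for specs
    that must stay as-is (explicit requests, pins)."""
    conflicts = set(find_conflicts(specs))
    while conflicts:
        loosened = False
        for spec in conflicts:
            weaker = loosen(spec)
            if weaker is not None and weaker != spec:
                specs.discard(spec)
                specs.add(weaker)
                loosened = True
        if not loosened:
            break  # nothing left to loosen; surface the conflict instead
        conflicts = set(find_conflicts(specs))
    return specs
```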
- conflicting_specs = set(ssc.r.get_conflicting_specs(tuple(final_environment_specs), - self.specs_to_add) or []) + conflicting_specs = set( + ssc.r.get_conflicting_specs( + tuple(final_environment_specs), self.specs_to_add + ) + or [] + ) while conflicting_specs: specs_modified = False if log.isEnabledFor(DEBUG): log.debug( - "conflicting specs: %s", dashlist(s.target or s for s in conflicting_specs) + "conflicting specs: %s", + dashlist(s.target or s for s in conflicting_specs), ) # Are all conflicting specs in specs_map? If not, that means they're in @@ -810,57 +998,69 @@ def _run_sat(self, ssc): # pinned_specs, but we override that in _add_specs to make it # non-optional when there's a name match in the explicit package # pool - conflicting_pinned_specs = groupby(lambda s: MatchSpec(s, optional=True) - in ssc.pinned_specs, conflicting_specs) + conflicting_pinned_specs = groupby( + lambda s: MatchSpec(s, optional=True) in ssc.pinned_specs, + conflicting_specs, + ) if conflicting_pinned_specs.get(True): in_specs_map = grouped_specs.get(True, ()) pinned_conflicts = conflicting_pinned_specs.get(True, ()) - in_specs_map_or_specs_to_add = ((set(in_specs_map) | set(self.specs_to_add)) - - set(pinned_conflicts)) + in_specs_map_or_specs_to_add = ( + set(in_specs_map) | set(self.specs_to_add) + ) - set(pinned_conflicts) raise SpecsConfigurationConflictError( sorted(s.__str__() for s in in_specs_map_or_specs_to_add), sorted(s.__str__() for s in {s for s in pinned_conflicts}), - self.prefix + self.prefix, ) for spec in conflicting_specs: if spec.target and not spec.optional: specs_modified = True final_environment_specs.remove(spec) - if spec.get('version'): + if spec.get("version"): neutered_spec = MatchSpec(spec.name, version=spec.version) else: neutered_spec = MatchSpec(spec.name) final_environment_specs.add(neutered_spec) ssc.specs_map[spec.name] = neutered_spec if specs_modified: - conflicting_specs = set(ssc.r.get_conflicting_specs( - tuple(final_environment_specs), self.specs_to_add)) + conflicting_specs = set( + ssc.r.get_conflicting_specs( + tuple(final_environment_specs), self.specs_to_add + ) + ) else: # Let r.solve() use r.find_conflicts() to report conflict chains. break # Finally! We get to call SAT. if log.isEnabledFor(DEBUG): - log.debug("final specs to add: %s", - dashlist(sorted(str(s) for s in final_environment_specs))) + log.debug( + "final specs to add: %s", + dashlist(sorted(str(s) for s in final_environment_specs)), + ) # this will raise for unsatisfiable stuff. 
We can if not conflicting_specs or context.unsatisfiable_hints: - ssc.solution_precs = ssc.r.solve(tuple(final_environment_specs), - specs_to_add=self.specs_to_add, - history_specs=ssc.specs_from_history_map, - should_retry_solve=ssc.should_retry_solve - ) + ssc.solution_precs = ssc.r.solve( + tuple(final_environment_specs), + specs_to_add=self.specs_to_add, + history_specs=ssc.specs_from_history_map, + should_retry_solve=ssc.should_retry_solve, + ) else: # shortcut to raise an unsat error without needing another solve step when # unsatisfiable_hints is off raise UnsatisfiableError({}) - self.neutered_specs = tuple(v for k, v in ssc.specs_map.items() if - k in ssc.specs_from_history_map and - v.strictness < ssc.specs_from_history_map[k].strictness) + self.neutered_specs = tuple( + v + for k, v in ssc.specs_map.items() + if k in ssc.specs_from_history_map + and v.strictness < ssc.specs_from_history_map[k].strictness + ) # add back inconsistent packages to solution if ssc.add_back_map: @@ -871,7 +1071,9 @@ def _run_sat(self, ssc): if not spec: # filter out solution precs and reinsert the conflict. Any resolution # of the conflict should be explicit (i.e. it must be in ssc.specs_map) - ssc.solution_precs = [_ for _ in ssc.solution_precs if _.name != name] + ssc.solution_precs = [ + _ for _ in ssc.solution_precs if _.name != name + ] ssc.solution_precs.append(prec) final_environment_specs.add(spec) @@ -904,15 +1106,20 @@ def _post_sat_handling(self, ssc): if spec.match(prec) } remove_before_adding_back = {prec.name for prec in only_add_these} - _no_deps_solution = IndexedSet(prec for prec in _no_deps_solution - if prec.name not in remove_before_adding_back) + _no_deps_solution = IndexedSet( + prec + for prec in _no_deps_solution + if prec.name not in remove_before_adding_back + ) _no_deps_solution |= only_add_these ssc.solution_precs = _no_deps_solution # TODO: check if solution is satisfiable, and emit warning if it's not - elif (ssc.deps_modifier == DepsModifier.ONLY_DEPS - and ssc.update_modifier != UpdateModifier.UPDATE_DEPS): + elif ( + ssc.deps_modifier == DepsModifier.ONLY_DEPS + and ssc.update_modifier != UpdateModifier.UPDATE_DEPS + ): # Using a special instance of PrefixGraph to remove youngest child nodes that match # the original specs_to_add. It's important to remove only the *youngest* child nodes, # because a typical use might be `conda install --only-deps python=2 flask`, and in @@ -941,7 +1148,9 @@ def _post_sat_handling(self, ssc): for node in removed_nodes if node.name not in specs_to_remove_names ) - ssc.solution_precs = tuple(PrefixGraph((*graph.graph, *filter(None, add_back))).graph) + ssc.solution_precs = tuple( + PrefixGraph((*graph.graph, *filter(None, add_back))).graph + ) # TODO: check if solution is satisfiable, and emit warning if it's not @@ -959,7 +1168,9 @@ def _post_sat_handling(self, ssc): update_names = set() for spec in self.specs_to_add: node = graph.get_node_by_name(spec.name) - update_names.update(ancest_rec.name for ancest_rec in graph.all_ancestors(node)) + update_names.update( + ancest_rec.name for ancest_rec in graph.all_ancestors(node) + ) specs_map = {name: MatchSpec(name) for name in update_names} # Remove pinned_specs and any python spec (due to major-minor pinning business rule). 
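# A minimal sketch (assumed graph shape, not conda's PrefixGraph API) of the
# UPDATE_DEPS expansion above: collect everything each requested package
# transitively depends on, so those names can be relaxed to bare specs for
# the follow-up solve.
from collections import deque

def transitive_deps(depends_on: dict[str, set[str]], roots: list[str]) -> set[str]:
    seen: set[str] = set()
    queue = deque(roots)
    while queue:
        for dep in depends_on.get(queue.popleft(), ()):
            if dep not in seen:
                seen.add(dep)
                queue.append(dep)
    return seen

# e.g. transitive_deps({"flask": {"jinja2"}, "jinja2": {"markupsafe"}}, ["flask"])
# returns {"jinja2", "markupsafe"}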
@@ -980,7 +1191,7 @@ def _post_sat_handling(self, ssc): deps_modifier=ssc.deps_modifier, prune=ssc.prune, ignore_pinned=ssc.ignore_pinned, - force_remove=ssc.force_remove + force_remove=ssc.force_remove, ) ssc.prune = False @@ -994,7 +1205,7 @@ def _post_sat_handling(self, ssc): def _notify_conda_outdated(self, link_precs): if not context.notify_outdated_conda or context.quiet: return - current_conda_prefix_rec = PrefixData(context.conda_prefix).get('conda', None) + current_conda_prefix_rec = PrefixData(context.conda_prefix).get("conda", None) if current_conda_prefix_rec: channel_name = current_conda_prefix_rec.channel.canonical_name if channel_name == UNKNOWN_CHANNEL: @@ -1008,8 +1219,12 @@ def _notify_conda_outdated(self, link_precs): return conda_newer_precs = sorted( - SubdirData.query_all(conda_newer_spec, self.channels, self.subdirs, - repodata_fn=self._repodata_fn), + SubdirData.query_all( + conda_newer_spec, + self.channels, + self.subdirs, + repodata_fn=self._repodata_fn, + ), key=lambda x: VersionOrder(x.version) # VersionOrder is fine here rather than r.version_key because all precs # should come from the same channel @@ -1018,7 +1233,9 @@ def _notify_conda_outdated(self, link_precs): latest_version = conda_newer_precs[-1].version # If conda comes from defaults, ensure we're giving instructions to users # that should resolve release timing issues between defaults and conda-forge. - print(dedent(f""" + print( + dedent( + f""" ==> WARNING: A newer version of conda exists. <== current version: {CONDA_VERSION} @@ -1032,7 +1249,10 @@ def _notify_conda_outdated(self, link_precs): conda install conda={latest_version} - """), file=sys.stderr) + """ + ), + file=sys.stderr, + ) def _prepare(self, prepared_specs): # All of this _prepare() method is hidden away down here. Someday we may want to further @@ -1041,7 +1261,7 @@ def _prepare(self, prepared_specs): if self._prepared and prepared_specs == self._prepared_specs: return self._index, self._r - if hasattr(self, '_index') and self._index: + if hasattr(self, "_index") and self._index: # added in install_actions for conda-build back-compat self._prepared_specs = prepared_specs _supplement_index_with_system(self._index) @@ -1055,14 +1275,19 @@ def _prepare(self, prepared_specs): additional_channels = set() for spec in self.specs_to_add: # TODO: correct handling for subdir isn't yet done - channel = spec.get_exact_value('channel') + channel = spec.get_exact_value("channel") if channel: additional_channels.add(Channel(channel)) self.channels.update(additional_channels) - reduced_index = get_reduced_index(self.prefix, self.channels, - self.subdirs, prepared_specs, self._repodata_fn) + reduced_index = get_reduced_index( + self.prefix, + self.channels, + self.subdirs, + prepared_specs, + self._repodata_fn, + ) _supplement_index_with_system(reduced_index) self._prepared_specs = prepared_specs @@ -1077,8 +1302,16 @@ class SolverStateContainer: # A mutable container with defined attributes to help keep method signatures clean # and also keep track of important state variables. 
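# The state-container pattern noted in the comment above, in miniature (a
# sketch, not conda's actual class): one mutable object with named
# attributes is threaded through the solver's helper methods instead of
# ever-growing argument lists.
from dataclasses import dataclass, field

@dataclass
class SolveState:
    prefix: str
    prune: bool = False
    specs_map: dict[str, str] = field(default_factory=dict)

def _add_specs(state: SolveState) -> SolveState:
    state.specs_map.setdefault("python", "python")  # mutate shared state
    return state

def _run_sat(state: SolveState) -> SolveState:
    if state.prune:
        state.specs_map.clear()
    return state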
- def __init__(self, prefix, update_modifier, deps_modifier, prune, ignore_pinned, force_remove, - should_retry_solve): + def __init__( + self, + prefix, + update_modifier, + deps_modifier, + prune, + ignore_pinned, + force_remove, + should_retry_solve, + ): # prefix, channels, subdirs, specs_to_add, specs_to_remove # self.prefix = prefix # self.channels = channels @@ -1121,7 +1354,7 @@ def specs_from_history_map(self): @memoizedproperty def track_features_specs(self): - return tuple(MatchSpec(x + '@') for x in context.track_features) + return tuple(MatchSpec(x + "@") for x in context.track_features) @memoizedproperty def pinned_specs(self): @@ -1139,25 +1372,39 @@ def working_state_reset(self): def get_pinned_specs(prefix): """Find pinned specs from file and return a tuple of MatchSpec.""" - pinfile = join(prefix, 'conda-meta', 'pinned') + pinfile = join(prefix, "conda-meta", "pinned") if exists(pinfile): with open(pinfile) as f: - from_file = (i for i in f.read().strip().splitlines() - if i and not i.strip().startswith('#')) + from_file = ( + i + for i in f.read().strip().splitlines() + if i and not i.strip().startswith("#") + ) else: from_file = () - return tuple(MatchSpec(spec, optional=True) for spec in (*context.pinned_packages, *from_file)) + return tuple( + MatchSpec(spec, optional=True) + for spec in (*context.pinned_packages, *from_file) + ) -def diff_for_unlink_link_precs(prefix, final_precs, specs_to_add=(), force_reinstall=NULL): +def diff_for_unlink_link_precs( + prefix, final_precs, specs_to_add=(), force_reinstall=NULL +): # Ensure final_precs supports the IndexedSet interface if not isinstance(final_precs, IndexedSet): - assert hasattr(final_precs, "__getitem__"), "final_precs must support list indexing" - assert hasattr(final_precs, "__sub__"), "final_precs must support set difference" + assert hasattr( + final_precs, "__getitem__" + ), "final_precs must support list indexing" + assert hasattr( + final_precs, "__sub__" + ), "final_precs must support set difference" previous_records = IndexedSet(PrefixGraph(PrefixData(prefix).iter_records()).graph) - force_reinstall = context.force_reinstall if force_reinstall is NULL else force_reinstall + force_reinstall = ( + context.force_reinstall if force_reinstall is NULL else force_reinstall + ) unlink_precs = previous_records - final_precs link_precs = final_precs - previous_records @@ -1176,17 +1423,24 @@ def _add_to_unlink_and_link(rec): _add_to_unlink_and_link(prec) # add back 'noarch: python' packages to unlink and link if python version changes - python_spec = MatchSpec('python') - prev_python = next((rec for rec in previous_records if python_spec.match(rec)), None) + python_spec = MatchSpec("python") + prev_python = next( + (rec for rec in previous_records if python_spec.match(rec)), None + ) curr_python = next((rec for rec in final_precs if python_spec.match(rec)), None) gmm = get_major_minor_version - if prev_python and curr_python and gmm(prev_python.version) != gmm(curr_python.version): + if ( + prev_python + and curr_python + and gmm(prev_python.version) != gmm(curr_python.version) + ): noarch_python_precs = (p for p in final_precs if p.noarch == NoarchType.python) for prec in noarch_python_precs: _add_to_unlink_and_link(prec) - unlink_precs = IndexedSet(reversed(sorted(unlink_precs, - key=lambda x: previous_records.index(x)))) + unlink_precs = IndexedSet( + reversed(sorted(unlink_precs, key=lambda x: previous_records.index(x))) + ) link_precs = IndexedSet(sorted(link_precs, key=lambda x: final_precs.index(x))) return 
unlink_precs, link_precs diff --git a/conda/core/subdir_data.py b/conda/core/subdir_data.py index 72c3ebaa100..ddeb884a546 100644 --- a/conda/core/subdir_data.py +++ b/conda/core/subdir_data.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from __future__ import annotations import json @@ -127,12 +126,16 @@ def clear_cached_local_channel_data(cls, exclude_file=True): # This should only ever be needed during unit tests, when # CONDA_USE_ONLY_TAR_BZ2 may change during process lifetime. if exclude_file: - cls._cache_ = {k: v for k, v in cls._cache_.items() if not k[0].startswith("file://")} + cls._cache_ = { + k: v for k, v in cls._cache_.items() if not k[0].startswith("file://") + } else: cls._cache_.clear() @staticmethod - def query_all(package_ref_or_match_spec, channels=None, subdirs=None, repodata_fn=REPODATA_FN): + def query_all( + package_ref_or_match_spec, channels=None, subdirs=None, repodata_fn=REPODATA_FN + ): from .index import check_allowlist # TODO: fix in-line import # ensure that this is not called by threaded code @@ -156,17 +159,23 @@ def query_all(package_ref_or_match_spec, channels=None, subdirs=None, repodata_f def subdir_query(url): return tuple( - SubdirData(Channel(url), repodata_fn=repodata_fn).query(package_ref_or_match_spec) + SubdirData(Channel(url), repodata_fn=repodata_fn).query( + package_ref_or_match_spec + ) ) # TODO test timing with ProcessPoolExecutor Executor = ( DummyExecutor if context.debug or context.repodata_threads == 1 - else partial(ThreadLimitedThreadPoolExecutor, max_workers=context.repodata_threads) + else partial( + ThreadLimitedThreadPoolExecutor, max_workers=context.repodata_threads + ) ) with Executor() as executor: - result = tuple(chain.from_iterable(executor.map(subdir_query, channel_urls))) + result = tuple( + chain.from_iterable(executor.map(subdir_query, channel_urls)) + ) return result def query(self, package_ref_or_match_spec): @@ -191,7 +200,9 @@ def query(self, package_ref_or_match_spec): if prec == param: yield prec - def __init__(self, channel, repodata_fn=REPODATA_FN, RepoInterface=CondaRepoInterface): + def __init__( + self, channel, repodata_fn=REPODATA_FN, RepoInterface=CondaRepoInterface + ): assert channel.subdir # metaclass __init__ asserts no package_filename if channel.package_filename: # pragma: no cover @@ -236,7 +247,8 @@ def cache_path_base(self): self._cache_dir = create_cache_dir() # self.repodata_fn may change return join( - self._cache_dir, splitext(cache_fn_url(self.url_w_credentials, self.repodata_fn))[0] + self._cache_dir, + splitext(cache_fn_url(self.url_w_credentials, self.repodata_fn))[0], ) @property @@ -245,7 +257,9 @@ def url_w_repodata_fn(self): @property def cache_path_json(self): - return Path(self.cache_path_base + ("1" if context.use_only_tar_bz2 else "") + ".json") + return Path( + self.cache_path_base + ("1" if context.use_only_tar_bz2 else "") + ".json" + ) @property def cache_path_state(self): @@ -253,7 +267,9 @@ def cache_path_state(self): Out-of-band etag and other state needed by the RepoInterface. """ return Path( - self.cache_path_base + ("1" if context.use_only_tar_bz2 else "") + ".state.json" + self.cache_path_base + + ("1" if context.use_only_tar_bz2 else "") + + ".state.json" ) @property @@ -318,7 +334,9 @@ def _load(self): # it and fall back to this on error? 
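# Sketch of the repodata cache layout handled above: one base name derived
# from the channel URL, with the payload (".json") and out-of-band state
# (".state.json") stored side by side so _load() can consult both. The md5
# scheme here is an assumption for illustration, not necessarily conda's
# exact cache_fn_url().
from hashlib import md5
from pathlib import Path

def cache_paths(cache_dir: str, url_w_credentials: str, repodata_fn: str) -> tuple[Path, Path]:
    base = md5(f"{url_w_credentials}/{repodata_fn}".encode()).hexdigest()[:8]
    root = Path(cache_dir) / base
    return root.with_suffix(".json"), root.with_suffix(".state.json")

# e.g. cache_paths("/tmp/cache", "https://repo.example/main/linux-64", "repodata.json")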
if not cache.cache_path_json.exists(): log.debug( - "No local cache found for %s at %s", self.url_w_repodata_fn, self.cache_path_json + "No local cache found for %s at %s", + self.url_w_repodata_fn, + self.cache_path_json, ) if context.use_index_cache or ( context.offline and not self.url_w_subdir.startswith("file://") @@ -331,7 +349,9 @@ def _load(self): return { "_package_records": (), "_names_index": defaultdict(list), - "_track_features_index": defaultdict(list), # Unused since early 2023 + "_track_features_index": defaultdict( + list + ), # Unused since early 2023 } else: @@ -346,7 +366,9 @@ def _load(self): return _internal_state stale = cache.stale() - if (not stale or context.offline) and not self.url_w_subdir.startswith("file://"): + if (not stale or context.offline) and not self.url_w_subdir.startswith( + "file://" + ): timeout = cache.timeout() log.debug( "Using cached repodata for %s at %s. Timeout in %d sec", @@ -358,7 +380,9 @@ def _load(self): return _internal_state log.debug( - "Local cache timed out for %s at %s", self.url_w_repodata_fn, self.cache_path_json + "Local cache timed out for %s at %s", + self.url_w_repodata_fn, + self.cache_path_json, ) try: @@ -417,7 +441,9 @@ def _load(self): raise NotWritableError(self.cache_path_json, e.errno, caused_by=e) else: raise - _internal_state = self._process_raw_repodata_str(raw_repodata_str, cache.state) + _internal_state = self._process_raw_repodata_str( + raw_repodata_str, cache.state + ) self._internal_state = _internal_state self._pickle_me() return _internal_state @@ -425,7 +451,9 @@ def _load(self): def _pickle_me(self): try: log.debug( - "Saving pickled state for %s at %s", self.url_w_repodata_fn, self.cache_path_pickle + "Saving pickled state for %s at %s", + self.url_w_repodata_fn, + self.cache_path_pickle, ) with open(self.cache_path_pickle, "wb") as fh: pickle.dump(self._internal_state, fh, pickle.HIGHEST_PROTOCOL) @@ -439,7 +467,11 @@ def _read_local_repodata(self, state: RepodataState): return _pickled_state # pickled data is bad or doesn't exist; load cached json - log.debug("Loading raw json for %s at %s", self.url_w_repodata_fn, self.cache_path_json) + log.debug( + "Loading raw json for %s at %s", + self.url_w_repodata_fn, + self.cache_path_json, + ) cache = self.repo_cache @@ -458,7 +490,9 @@ def _read_local_repodata(self, state: RepodataState): ) raise CondaError(message) else: - _internal_state = self._process_raw_repodata_str(raw_repodata_str, cache.state) + _internal_state = self._process_raw_repodata_str( + raw_repodata_str, cache.state + ) # taken care of by _process_raw_repodata(): assert self._internal_state is _internal_state self._pickle_me() @@ -470,16 +504,23 @@ def _pickle_valid_checks(self, pickled_state, mod, etag): """ yield "_url", pickled_state.get("_url"), self.url_w_credentials yield "_schannel", pickled_state.get("_schannel"), self.channel.canonical_name - yield "_add_pip", pickled_state.get("_add_pip"), context.add_pip_as_python_dependency + yield "_add_pip", pickled_state.get( + "_add_pip" + ), context.add_pip_as_python_dependency yield "_mod", pickled_state.get("_mod"), mod yield "_etag", pickled_state.get("_etag"), etag - yield "_pickle_version", pickled_state.get("_pickle_version"), REPODATA_PICKLE_VERSION + yield "_pickle_version", pickled_state.get( + "_pickle_version" + ), REPODATA_PICKLE_VERSION yield "fn", pickled_state.get("fn"), self.repodata_fn def _read_pickled(self, state: RepodataState): if not isinstance(state, RepodataState): state = RepodataState( - self.cache_path_json, 
self.cache_path_state, self.repodata_fn, dict=state + self.cache_path_json, + self.cache_path_state, + self.repodata_fn, + dict=state, ) if not isfile(self.cache_path_pickle) or not isfile(self.cache_path_json): @@ -514,7 +555,9 @@ def _check_pickled_valid(): return _pickled_state - def _process_raw_repodata_str(self, raw_repodata_str, state: RepodataState | None = None): + def _process_raw_repodata_str( + self, raw_repodata_str, state: RepodataState | None = None + ): """ state contains information that was previously in-band in raw_repodata_str. """ @@ -524,7 +567,10 @@ def _process_raw_repodata_str(self, raw_repodata_str, state: RepodataState | Non def _process_raw_repodata(self, repodata, state: RepodataState | None = None): if not isinstance(state, RepodataState): state = RepodataState( - self.cache_path_json, self.cache_path_state, self.repodata_fn, dict=state + self.cache_path_json, + self.cache_path_state, + self.repodata_fn, + dict=state, ) subdir = repodata.get("info", {}).get("subdir") or self.channel.subdir @@ -581,7 +627,9 @@ def _process_raw_repodata(self, repodata, state: RepodataState | None = None): channel_url = self.url_w_credentials legacy_packages = repodata.get("packages", {}) - conda_packages = {} if context.use_only_tar_bz2 else repodata.get("packages.conda", {}) + conda_packages = ( + {} if context.use_only_tar_bz2 else repodata.get("packages.conda", {}) + ) _tar_bz2 = CONDA_PACKAGE_EXTENSION_V1 use_these_legacy_keys = set(legacy_packages.keys()) - { @@ -593,7 +641,6 @@ def _process_raw_repodata(self, repodata, state: RepodataState | None = None): (((k, legacy_packages[k]) for k in use_these_legacy_keys), False), ): for fn, info in group: - # Verify metadata signature before anything else so run-time # updates to the info dictionary performed below do not # invalidate the signatures provided in metadata.json. @@ -603,7 +650,9 @@ def _process_raw_repodata(self, repodata, state: RepodataState | None = None): counterpart = fn.replace(".conda", ".tar.bz2") if counterpart in legacy_packages: info["legacy_bz2_md5"] = legacy_packages[counterpart].get("md5") - info["legacy_bz2_size"] = legacy_packages[counterpart].get("size") + info["legacy_bz2_size"] = legacy_packages[counterpart].get( + "size" + ) if ( add_pip and info["name"] == "python" @@ -613,7 +662,9 @@ def _process_raw_repodata(self, repodata, state: RepodataState | None = None): info.update(meta_in_common) if info.get("record_version", 0) > 1: log.debug( - "Ignoring record_version %d from %s", info["record_version"], info["url"] + "Ignoring record_version %d from %s", + info["record_version"], + info["url"], ) continue diff --git a/conda/deprecations.py b/conda/deprecations.py index 68ca1a7ade7..6b3917a7e1f 100644 --- a/conda/deprecations.py +++ b/conda/deprecations.py @@ -2,11 +2,11 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations +import warnings from functools import wraps from types import ModuleType -import warnings -from packaging.version import parse, Version +from packaging.version import Version, parse from .__version__ import __version__ @@ -95,7 +95,9 @@ def deprecated_decorator(func: Callable) -> Callable: remove_in, f"{func.__module__}.{func.__qualname__}({argument})", # provide a default addendum if renaming and no addendum is provided - addendum=f"Use '{rename}' instead." if rename and not addendum else addendum, + addendum=f"Use '{rename}' instead." 
+ if rename and not addendum + else addendum, ) # alert developer that it's time to remove something @@ -209,7 +211,9 @@ def topic( :param stack: Optional stacklevel increment. """ # detect function name and generate message - category, message = self._generate_message(deprecate_in, remove_in, topic, addendum) + category, message = self._generate_message( + deprecate_in, remove_in, topic, addendum + ) # alert developer that it's time to remove something if not category: diff --git a/conda/exceptions.py b/conda/exceptions.py index 8a734d8b900..c2ba37debe2 100644 --- a/conda/exceptions.py +++ b/conda/exceptions.py @@ -1,27 +1,23 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from __future__ import annotations +import getpass +import json +import os +import sys from datetime import timedelta from errno import ENOSPC from functools import lru_cache, partial -import json from json.decoder import JSONDecodeError from logging import getLogger -import os from os.path import join -import sys from textwrap import dedent from traceback import format_exception, format_exception_only -import getpass from conda.common.iterators import groupby_to_dict as groupby -from .models.channel import Channel -from .common.url import join_url, maybe_unquote from . import CondaError, CondaExitZero, CondaMultiError -from .deprecations import DeprecatedError # noqa: 401 from .auxlib.entity import EntityEncoder from .auxlib.ish import dals from .auxlib.logz import stringify @@ -30,6 +26,9 @@ from .common.compat import ensure_text_type, on_win from .common.io import dashlist, timeout from .common.signals import get_signal_name +from .common.url import join_url, maybe_unquote +from .deprecations import DeprecatedError # noqa: 401 +from .models.channel import Channel log = getLogger(__name__) @@ -41,10 +40,16 @@ def __init__(self, bad_deps): # bad_deps is a list of lists # bad_deps should really be named 'invalid_chains' self.bad_deps = tuple(dep for deps in bad_deps for dep in deps if dep) - formatted_chains = tuple(" -> ".join(map(str, bad_chain)) for bad_chain in bad_deps) + formatted_chains = tuple( + " -> ".join(map(str, bad_chain)) for bad_chain in bad_deps + ) self._formatted_chains = formatted_chains - message = '\n' + '\n'.join((' - %s' % bad_chain) for bad_chain in formatted_chains) + message = "\n" + "\n".join( + (" - %s" % bad_chain) for bad_chain in formatted_chains + ) super().__init__(message) + + NoPackagesFound = NoPackagesFoundError = ResolvePackageNotFound # NOQA @@ -66,9 +71,9 @@ class Help(CondaError): class ActivateHelp(Help): - def __init__(self): - message = dals(""" + message = dals( + """ usage: conda activate [-h] [--[no-]stack] [env_name_or_prefix] Activate a conda environment. @@ -90,14 +95,15 @@ def __init__(self): configuration variable. --no-stack Do not stack the environment. Overrides 'auto_stack' setting. - """) + """ + ) super().__init__(message) class DeactivateHelp(Help): - def __init__(self): - message = dals(""" + message = dals( + """ usage: conda deactivate [-h] Deactivate the current active conda environment. @@ -106,12 +112,12 @@ def __init__(self): optional arguments: -h, --help Show this help message and exit. 
- """) + """ + ) super().__init__(message) class GenericHelp(Help): - def __init__(self, command): message = "help requested for %s" % command super().__init__(message) @@ -126,16 +132,22 @@ def __init__(self, signum): class TooManyArgumentsError(ArgumentError): - def __init__(self, expected, received, offending_arguments, optional_message='', - *args): + def __init__( + self, expected, received, offending_arguments, optional_message="", *args + ): self.expected = expected self.received = received self.offending_arguments = offending_arguments self.optional_message = optional_message - suffix = 's' if received - expected > 1 else '' - msg = ('%s Got %s argument%s (%s) but expected %s.' % - (optional_message, received, suffix, ', '.join(offending_arguments), expected)) + suffix = "s" if received - expected > 1 else "" + msg = "{} Got {} argument{} ({}) but expected {}.".format( + optional_message, + received, + suffix, + ", ".join(offending_arguments), + expected, + ) super().__init__(msg, *args) @@ -145,41 +157,57 @@ def __init__(self, message, path_conflict, **kwargs): super().__init__(message, **kwargs) def __repr__(self): - clz_name = "ClobberWarning" if self.path_conflict == PathConflict.warn else "ClobberError" + clz_name = ( + "ClobberWarning" + if self.path_conflict == PathConflict.warn + else "ClobberError" + ) return f"{clz_name}: {self}\n" class BasicClobberError(ClobberError): def __init__(self, source_path, target_path, context): - message = dals(""" + message = dals( + """ Conda was asked to clobber an existing path. source path: %(source_path)s target path: %(target_path)s - """) + """ + ) if context.path_conflict == PathConflict.prevent: message += ( "Conda no longer clobbers existing paths without the use of the " "--clobber option\n." ) super().__init__( - message, context.path_conflict, target_path=target_path, source_path=source_path + message, + context.path_conflict, + target_path=target_path, + source_path=source_path, ) class KnownPackageClobberError(ClobberError): - def __init__(self, target_path, colliding_dist_being_linked, colliding_linked_dist, context): - message = dals(""" + def __init__( + self, target_path, colliding_dist_being_linked, colliding_linked_dist, context + ): + message = dals( + """ The package '%(colliding_dist_being_linked)s' cannot be installed due to a path collision for '%(target_path)s'. This path already exists in the target prefix, and it won't be removed by an uninstall action in this transaction. The path appears to be coming from the package '%(colliding_linked_dist)s', which is already installed in the prefix. - """) + """ + ) if context.path_conflict == PathConflict.prevent: - message += ("If you'd like to proceed anyway, re-run the command with " - "the `--clobber` flag.\n.") + message += ( + "If you'd like to proceed anyway, re-run the command with " + "the `--clobber` flag.\n." + ) super().__init__( - message, context.path_conflict, + message, + context.path_conflict, target_path=target_path, colliding_dist_being_linked=colliding_dist_being_linked, colliding_linked_dist=colliding_linked_dist, @@ -188,18 +216,23 @@ def __init__(self, target_path, colliding_dist_being_linked, colliding_linked_di class UnknownPackageClobberError(ClobberError): def __init__(self, target_path, colliding_dist_being_linked, context): - message = dals(""" + message = dals( + """ The package '%(colliding_dist_being_linked)s' cannot be installed due to a path collision for '%(target_path)s'. 
This path already exists in the target prefix, and it won't be removed by an uninstall action in this transaction. The path is one that conda doesn't recognize. It may have been created by another package manager. - """) + """ + ) if context.path_conflict == PathConflict.prevent: - message += ("If you'd like to proceed anyway, re-run the command with " - "the `--clobber` flag.\n.") + message += ( + "If you'd like to proceed anyway, re-run the command with " + "the `--clobber` flag.\n." + ) super().__init__( - message, context.path_conflict, + message, + context.path_conflict, target_path=target_path, colliding_dist_being_linked=colliding_dist_being_linked, ) @@ -207,65 +240,79 @@ def __init__(self, target_path, colliding_dist_being_linked, context): class SharedLinkPathClobberError(ClobberError): def __init__(self, target_path, incompatible_package_dists, context): - message = dals(""" + message = dals( + """ This transaction has incompatible packages due to a shared path. packages: %(incompatible_packages)s path: '%(target_path)s' - """) + """ + ) if context.path_conflict == PathConflict.prevent: - message += ("If you'd like to proceed anyway, re-run the command with " - "the `--clobber` flag.\n.") + message += ( + "If you'd like to proceed anyway, re-run the command with " + "the `--clobber` flag.\n." + ) super().__init__( - message, context.path_conflict, + message, + context.path_conflict, target_path=target_path, - incompatible_packages=', '.join(str(d) for d in incompatible_package_dists), + incompatible_packages=", ".join(str(d) for d in incompatible_package_dists), ) class CommandNotFoundError(CondaError): def __init__(self, command): activate_commands = { - 'activate', - 'deactivate', - 'run', + "activate", + "deactivate", + "run", } conda_commands = { - 'clean', - 'config', - 'create', - '--help', # https://github.com/conda/conda/issues/11585 - 'info', - 'install', - 'list', - 'package', - 'remove', - 'search', - 'uninstall', - 'update', - 'upgrade', + "clean", + "config", + "create", + "--help", # https://github.com/conda/conda/issues/11585 + "info", + "install", + "list", + "package", + "remove", + "search", + "uninstall", + "update", + "upgrade", } build_commands = { - 'build', - 'convert', - 'develop', - 'index', - 'inspect', - 'metapackage', - 'render', - 'skeleton', + "build", + "convert", + "develop", + "index", + "inspect", + "metapackage", + "render", + "skeleton", } from .base.context import context from .cli.main import init_loggers + init_loggers(context) if command in activate_commands: # TODO: Point users to a page at conda-docs, which explains this context in more detail - builder = ["Your shell has not been properly configured to use 'conda %(command)s'."] + builder = [ + "Your shell has not been properly configured to use 'conda %(command)s'." + ] if on_win: - builder.append(dals(""" + builder.append( + dals( + """ If using 'conda %(command)s' from a batch script, change your invocation to 'CALL conda.bat %(command)s'. - """)) - builder.append(dals(""" + """ + ) + ) + builder.append( + dals( + """ To initialize your shell, run $ conda init @@ -275,17 +322,27 @@ def __init__(self, command): See 'conda init --help' for more information and options. IMPORTANT: You may need to close and restart your shell after running 'conda init'. 
- """) % { - 'supported_shells': dashlist(COMPATIBLE_SHELLS), - }) - message = '\n'.join(builder) + """ + ) + % { + "supported_shells": dashlist(COMPATIBLE_SHELLS), + } + ) + message = "\n".join(builder) elif command in build_commands: message = "To use 'conda %(command)s', install conda-build." else: from difflib import get_close_matches + from .cli.find_commands import find_commands + message = "No command 'conda %(command)s'." - choices = activate_commands | conda_commands | build_commands | set(find_commands()) + choices = ( + activate_commands + | conda_commands + | build_commands + | set(find_commands()) + ) close = get_close_matches(command, choices) if close: message += "\nDid you mean 'conda %s'?" % close[0] @@ -312,57 +369,64 @@ def __init__(self, location): class EnvironmentNameNotFound(CondaError): def __init__(self, environment_name): - message = dals(""" + message = dals( + """ Could not find conda environment: %(environment_name)s You can list all discoverable environments with `conda info --envs`. - """) + """ + ) super().__init__(message, environment_name=environment_name) class NoBaseEnvironmentError(CondaError): - def __init__(self): - message = dals(""" + message = dals( + """ This conda installation has no default base environment. Use 'conda create' to create new environments and 'conda activate' to activate environments. - """) + """ + ) super().__init__(message) class DirectoryNotACondaEnvironmentError(CondaError): - def __init__(self, target_directory): - message = dals(""" + message = dals( + """ The target directory exists, but it is not a conda environment. Use 'conda create' to convert the directory to a conda environment. target directory: %(target_directory)s - """) + """ + ) super().__init__(message, target_directory=target_directory) class CondaEnvironmentError(CondaError, EnvironmentError): def __init__(self, message, *args): - msg = '%s' % message + msg = "%s" % message super().__init__(msg, *args) class DryRunExit(CondaExitZero): def __init__(self): - msg = 'Dry run. Exiting.' + msg = "Dry run. Exiting." super().__init__(msg) class CondaSystemExit(CondaExitZero, SystemExit): def __init__(self, *args): - msg = ' '.join(str(arg) for arg in self.args) + msg = " ".join(str(arg) for arg in self.args) super().__init__(msg) class PaddingError(CondaError): def __init__(self, dist, placeholder, placeholder_length): - msg = ("Placeholder of length '%d' too short in package %s.\n" - "The package must be rebuilt with conda-build > 2.0." % (placeholder_length, dist)) + msg = ( + "Placeholder of length '%d' too short in package %s.\n" + "The package must be rebuilt with conda-build > 2.0." + % (placeholder_length, dist) + ) super().__init__(msg) @@ -373,24 +437,26 @@ def __init__(self, message): class CondaOSError(CondaError, OSError): def __init__(self, message, **kwargs): - msg = '%s' % message + msg = "%s" % message super().__init__(msg, **kwargs) class ProxyError(CondaError): def __init__(self): - message = dals(""" + message = dals( + """ Conda cannot proceed due to an error in your proxy configuration. Check for typos and other configuration errors in any '.netrc' file in your home directory, any environment variables ending in '_PROXY', and any other system-wide proxy configuration settings. 
- """) + """ + ) super().__init__(message) class CondaIOError(CondaError, IOError): def __init__(self, message, *args): - msg = '%s' % message + msg = "%s" % message super().__init__(msg) @@ -418,20 +484,20 @@ def __init__(self, channel): channel = Channel(channel) channel_name = channel.name channel_url = maybe_unquote(channel.base_url) - message = dals(""" + message = dals( + """ Channel not included in allowlist: channel name: %(channel_name)s channel url: %(channel_url)s - """) + """ + ) super().__init__(message, channel_url=channel_url, channel_name=channel_name) class UnavailableInvalidChannel(ChannelError): - status_code: str | int def __init__(self, channel, status_code, response=None): - # parse channel channel = Channel(channel) channel_name = channel.name @@ -446,7 +512,8 @@ def __init__(self, channel, status_code, response=None): You will need to adjust your conda configuration to proceed. Use `conda config --show channels` to view your configuration's current state, and use `conda config --show-sources` to view config file locations. - """) + """ + ) if channel.scheme == "file": url = join_url(channel.location, channel.name) message += dedent( @@ -455,7 +522,8 @@ def __init__(self, channel, status_code, response=None): associated `noarch/repodata.json.bz2` file, even if `noarch/repodata.json` is empty. Use `conda index {url}`, or create `noarch/repodata.json` and associated `noarch/repodata.json.bz2`. - """) + """ + ) # if response includes a valid json body we prefer the reason/message defined there try: @@ -486,20 +554,19 @@ def __init__(self, channel, status_code, response=None): class OperationNotAllowed(CondaError): - def __init__(self, message): super().__init__(message) class CondaImportError(CondaError, ImportError): def __init__(self, message): - msg = '%s' % message + msg = "%s" % message super().__init__(msg) class ParseError(CondaError): def __init__(self, message): - msg = '%s' % message + msg = "%s" % message super().__init__(msg) @@ -510,35 +577,52 @@ def __init__(self, reason): class ChecksumMismatchError(CondaError): - def __init__(self, url, target_full_path, checksum_type, expected_checksum, actual_checksum): - message = dals(""" + def __init__( + self, url, target_full_path, checksum_type, expected_checksum, actual_checksum + ): + message = dals( + """ Conda detected a mismatch between the expected content and downloaded content for url '%(url)s'. download saved to: %(target_full_path)s expected %(checksum_type)s: %(expected_checksum)s actual %(checksum_type)s: %(actual_checksum)s - """) + """ + ) url = maybe_unquote(url) super().__init__( - message, url=url, target_full_path=target_full_path, checksum_type=checksum_type, - expected_checksum=expected_checksum, actual_checksum=actual_checksum, + message, + url=url, + target_full_path=target_full_path, + checksum_type=checksum_type, + expected_checksum=expected_checksum, + actual_checksum=actual_checksum, ) class PackageNotInstalledError(CondaError): - def __init__(self, prefix, package_name): - message = dals(""" + message = dals( + """ Package is not installed in prefix. 
prefix: %(prefix)s package name: %(package_name)s - """) + """ + ) super().__init__(message, prefix=prefix, package_name=package_name) class CondaHTTPError(CondaError): - def __init__(self, message, url, status_code, reason, elapsed_time, response=None, - caused_by=None): + def __init__( + self, + message, + url, + status_code, + reason, + elapsed_time, + response=None, + caused_by=None, + ): # if response includes a valid json body we prefer the reason/message defined there try: body = response.json() @@ -550,11 +634,11 @@ def __init__(self, message, url, status_code, reason, elapsed_time, response=Non # standardize arguments url = maybe_unquote(url) - status_code = status_code or '000' - reason = reason or 'CONNECTION FAILED' + status_code = status_code or "000" + reason = reason or "CONNECTION FAILED" if isinstance(reason, str): reason = reason.upper() - elapsed_time = elapsed_time or '-' + elapsed_time = elapsed_time or "-" if isinstance(elapsed_time, timedelta): elapsed_time = str(elapsed_time).split(":", 1)[-1] @@ -595,13 +679,12 @@ class AuthenticationError(CondaError): class PackagesNotFoundError(CondaError): - def __init__(self, packages, channel_urls=()): - - format_list = lambda iterable: ' - ' + '\n - '.join(str(x) for x in iterable) + format_list = lambda iterable: " - " + "\n - ".join(str(x) for x in iterable) if channel_urls: - message = dals(""" + message = dals( + """ The following packages are not available from current channels: %(packages_formatted)s @@ -616,27 +699,35 @@ def __init__(self, packages, channel_urls=()): https://anaconda.org and use the search bar at the top of the page. - """) + """ + ) from .base.context import context if context.use_only_tar_bz2: - message += dals(""" + message += dals( + """ Note: 'use_only_tar_bz2' is enabled. This might be omitting some packages from the index. Set this option to 'false' and retry. - """) + """ + ) packages_formatted = format_list(packages) channels_formatted = format_list(channel_urls) else: - message = dals(""" + message = dals( + """ The following packages are missing from the target environment: %(packages_formatted)s - """) + """ + ) packages_formatted = format_list(packages) channels_formatted = () super().__init__( - message, packages=packages, packages_formatted=packages_formatted, - channel_urls=channel_urls, channels_formatted=channels_formatted + message, + packages=packages, + packages_formatted=packages_formatted, + channel_urls=channel_urls, + channels_formatted=channels_formatted, ) @@ -653,15 +744,16 @@ class UnsatisfiableError(CondaError): Raises an exception with a formatted message detailing the unsatisfiable specifications. 
""" + def _format_chain_str(self, bad_deps): chains = {} for dep in sorted(bad_deps, key=len, reverse=True): - dep1 = [s.partition(' ') for s in dep[1:]] + dep1 = [s.partition(" ") for s in dep[1:]] key = (dep[0],) + tuple(v[0] for v in dep1) - vals = ('',) + tuple(v[2] for v in dep1) + vals = ("",) + tuple(v[2] for v in dep1) found = False for key2, csets in chains.items(): - if key2[:len(key)] == key: + if key2[: len(key)] == key: for cset, val in zip(csets, vals): cset.add(val) found = True @@ -670,7 +762,7 @@ def _format_chain_str(self, bad_deps): for key, csets in chains.items(): deps = [] for name, cset in zip(key, csets): - if '' not in cset: + if "" not in cset: pass elif len(cset) == 1: cset.clear() @@ -679,14 +771,18 @@ def _format_chain_str(self, bad_deps): cset.add("*") if name[0] == "@": name = "feature:" + name[1:] - deps.append("{} {}".format(name, "|".join(sorted(cset))) if cset else name) + deps.append( + "{} {}".format(name, "|".join(sorted(cset))) if cset else name + ) chains[key] = " -> ".join(deps) return [chains[key] for key in sorted(chains.keys())] def __init__(self, bad_deps, chains=True, strict=False): from .models.match_spec import MatchSpec - messages = {'python': dals(''' + messages = { + "python": dals( + """ The following specifications were found to be incompatible with the existing python installation in your environment: @@ -701,33 +797,41 @@ def __init__(self, bad_deps, chains=True, strict=False): change your python version to a different minor version unless you explicitly specify that. - '''), - 'request_conflict_with_history': dals(''' + """ + ), + "request_conflict_with_history": dals( + """ The following specifications were found to be incompatible with a past explicit spec that is not an explicit spec in this operation ({ref}):\n{specs} - '''), - 'direct': dals(''' + """ + ), + "direct": dals( + """ The following specifications were found to be incompatible with each other: - '''), - 'virtual_package': dals(''' + """ + ), + "virtual_package": dals( + """ The following specifications were found to be incompatible with your system:\n{specs} Your installed version is: {ref} -''')} +""" + ), + } msg = "" self.unsatisfiable = [] if len(bad_deps) == 0: - msg += ''' + msg += """ Did not find conflicting dependencies. If you would like to know which packages conflict ensure that you have enabled unsatisfiable hints. 
conda config --set unsatisfiable_hints True - ''' + """ else: for class_name, dep_class in bad_deps.items(): if dep_class: @@ -745,36 +849,46 @@ def __init__(self, bad_deps, chains=True, strict=False): for dep, chain in dep_constraint_map.items(): if len(chain) > 1: msg += "\n\nPackage %s conflicts for:\n" % dep - msg += "\n".join([" -> ".join([str(i) for i in c]) for c in chain]) - self.unsatisfiable += [tuple(entries) for entries in chain] + msg += "\n".join( + [" -> ".join([str(i) for i in c]) for c in chain] + ) + self.unsatisfiable += [ + tuple(entries) for entries in chain + ] else: for dep_chain, installed_blocker in dep_class: # Remove any target values from the MatchSpecs, convert to strings - dep_chain = [str(MatchSpec(dep, target=None)) for dep in dep_chain] + dep_chain = [ + str(MatchSpec(dep, target=None)) for dep in dep_chain + ] _chains.append(dep_chain) if _chains: _chains = self._format_chain_str(_chains) else: - _chains = [', '.join(c) for c in _chains] - msg += messages[class_name].format(specs=dashlist(_chains), - ref=installed_blocker) + _chains = [", ".join(c) for c in _chains] + msg += messages[class_name].format( + specs=dashlist(_chains), ref=installed_blocker + ) if strict: - msg += ('\nNote that strict channel priority may have removed ' - 'packages required for satisfiability.') + msg += ( + "\nNote that strict channel priority may have removed " + "packages required for satisfiability." + ) super().__init__(msg) class RemoveError(CondaError): def __init__(self, message): - msg = '%s' % message + msg = "%s" % message super().__init__(msg) class DisallowedPackageError(CondaError): def __init__(self, package_ref, **kwargs): from .models.records import PackageRecord + package_ref = PackageRecord.from_objects(package_ref) message = ( "The package '%(dist_str)s' is disallowed by configuration.\n" @@ -786,32 +900,36 @@ def __init__(self, package_ref, **kwargs): class SpecsConfigurationConflictError(CondaError): - def __init__(self, requested_specs, pinned_specs, prefix): - message = dals(""" + message = dals( + """ Requested specs conflict with configured specs. requested specs: {requested_specs_formatted} pinned specs: {pinned_specs_formatted} Use 'conda config --show-sources' to look for 'pinned_specs' and 'track_features' configuration parameters. Pinned specs may also be defined in the file {pinned_specs_path}. 
- """).format( + """ + ).format( requested_specs_formatted=dashlist(requested_specs, 4), pinned_specs_formatted=dashlist(pinned_specs, 4), - pinned_specs_path=join(prefix, 'conda-meta', 'pinned'), + pinned_specs_path=join(prefix, "conda-meta", "pinned"), ) super().__init__( - message, requested_specs=requested_specs, pinned_specs=pinned_specs, prefix=prefix, + message, + requested_specs=requested_specs, + pinned_specs=pinned_specs, + prefix=prefix, ) + class CondaIndexError(CondaError, IndexError): def __init__(self, message): - msg = '%s' % message + msg = "%s" % message super().__init__(msg) class CondaValueError(CondaError, ValueError): - def __init__(self, message, *args, **kwargs): super().__init__(message, *args, **kwargs) @@ -819,7 +937,10 @@ def __init__(self, message, *args, **kwargs): class CyclicalDependencyError(CondaError, ValueError): def __init__(self, packages_with_cycles, **kwargs): from .models.records import PackageRecord - packages_with_cycles = tuple(PackageRecord.from_objects(p) for p in packages_with_cycles) + + packages_with_cycles = tuple( + PackageRecord.from_objects(p) for p in packages_with_cycles + ) message = "Cyclic dependencies exist among these items: %s" % dashlist( p.dist_str() for p in packages_with_cycles ) @@ -828,24 +949,26 @@ def __init__(self, packages_with_cycles, **kwargs): class CorruptedEnvironmentError(CondaError): def __init__(self, environment_location, corrupted_file, **kwargs): - message = dals(""" + message = dals( + """ The target environment has been corrupted. Corrupted environments most commonly occur when the conda process is force-terminated while in an unlink-link transaction. environment location: %(environment_location)s corrupted file: %(corrupted_file)s - """) + """ + ) super().__init__( message, environment_location=environment_location, corrupted_file=corrupted_file, - **kwargs + **kwargs, ) class CondaHistoryError(CondaError): def __init__(self, message): - msg = '%s' % message + msg = "%s" % message super().__init__(msg) @@ -872,19 +995,23 @@ def __init__(self, caused_by, **kwargs): class NotWritableError(CondaError, OSError): - def __init__(self, path, errno, **kwargs): - kwargs.update({ - 'path': path, - 'errno': errno, - }) + kwargs.update( + { + "path": path, + "errno": errno, + } + ) if on_win: - message = dals(""" + message = dals( + """ The current user does not have write permissions to a required path. path: %(path)s - """) + """ + ) else: - message = dals(""" + message = dals( + """ The current user does not have write permissions to a required path. path: %(path)s uid: %(uid)s @@ -896,51 +1023,59 @@ def __init__(self, path, errno, **kwargs): $ sudo chown %(uid)s:%(gid)s %(path)s In general, it's not advisable to use 'sudo conda'. 
- """) - kwargs.update({ - 'uid': os.geteuid(), - 'gid': os.getegid(), - }) + """ + ) + kwargs.update( + { + "uid": os.geteuid(), + "gid": os.getegid(), + } + ) super().__init__(message, **kwargs) self.errno = errno class NoWritableEnvsDirError(CondaError): - def __init__(self, envs_dirs, **kwargs): message = "No writeable envs directories configured.%s" % dashlist(envs_dirs) super().__init__(message, envs_dirs=envs_dirs, **kwargs) class NoWritablePkgsDirError(CondaError): - def __init__(self, pkgs_dirs, **kwargs): message = "No writeable pkgs directories configured.%s" % dashlist(pkgs_dirs) super().__init__(message, pkgs_dirs=pkgs_dirs, **kwargs) class EnvironmentNotWritableError(CondaError): - def __init__(self, environment_location, **kwargs): - kwargs.update({ - 'environment_location': environment_location, - }) + kwargs.update( + { + "environment_location": environment_location, + } + ) if on_win: - message = dals(""" + message = dals( + """ The current user does not have write permissions to the target environment. environment location: %(environment_location)s - """) + """ + ) else: - message = dals(""" + message = dals( + """ The current user does not have write permissions to the target environment. environment location: %(environment_location)s uid: %(uid)s gid: %(gid)s - """) - kwargs.update({ - 'uid': os.geteuid(), - 'gid': os.getegid(), - }) + """ + ) + kwargs.update( + { + "uid": os.geteuid(), + "gid": os.getegid(), + } + ) super().__init__(message, **kwargs) @@ -950,27 +1085,30 @@ def __init__(self, message): class BinaryPrefixReplacementError(CondaError): - def __init__(self, path, placeholder, new_prefix, original_data_length, new_data_length): - message = dals(""" + def __init__( + self, path, placeholder, new_prefix, original_data_length, new_data_length + ): + message = dals( + """ Refusing to replace mismatched data length in binary file. path: %(path)s placeholder: %(placeholder)s new prefix: %(new_prefix)s original data Length: %(original_data_length)d new data length: %(new_data_length)d - """) + """ + ) kwargs = { - 'path': path, - 'placeholder': placeholder, - 'new_prefix': new_prefix, - 'original_data_length': original_data_length, - 'new_data_length': new_data_length, + "path": path, + "placeholder": placeholder, + "new_prefix": new_prefix, + "original_data_length": original_data_length, + "new_data_length": new_data_length, } super().__init__(message, **kwargs) class InvalidSpec(CondaError, ValueError): - def __init__(self, message, **kwargs): super().__init__(message, **kwargs) @@ -988,9 +1126,10 @@ def __init__(self, invalid_spec, details): class EncodingError(CondaError): - def __init__(self, caused_by, **kwargs): - message = dals(""" + message = ( + dals( + """ A unicode encoding or decoding error has occurred. Python 2 is the interpreter under which conda is running in your base environment. Replacing your base environment with one having Python 3 may help resolve this issue. @@ -1002,12 +1141,14 @@ def __init__(self, caused_by, **kwargs): Error details: %r - """) % caused_by + """ + ) + % caused_by + ) super().__init__(message, caused_by=caused_by, **kwargs) class NoSpaceLeftError(CondaError): - def __init__(self, caused_by, **kwargs): message = "No space left on devices." 
super().__init__(message, caused_by=caused_by, **kwargs) @@ -1065,14 +1206,21 @@ def maybe_raise(error, context): safety_errors = groups.get(True, ()) other_errors = groups.get(False, ()) - if ((safety_errors and context.safety_checks == SafetyChecks.enabled) - or (clobber_errors and context.path_conflict == PathConflict.prevent - and not context.clobber) - or other_errors): + if ( + (safety_errors and context.safety_checks == SafetyChecks.enabled) + or ( + clobber_errors + and context.path_conflict == PathConflict.prevent + and not context.clobber + ) + or other_errors + ): raise error - elif ((safety_errors and context.safety_checks == SafetyChecks.warn) - or (clobber_errors and context.path_conflict == PathConflict.warn - and not context.clobber)): + elif (safety_errors and context.safety_checks == SafetyChecks.warn) or ( + clobber_errors + and context.path_conflict == PathConflict.warn + and not context.clobber + ): print_conda_exception(error) elif isinstance(error, ClobberError): @@ -1093,19 +1241,24 @@ def maybe_raise(error, context): def print_conda_exception(exc_val, exc_tb=None): from .base.context import context - rc = getattr(exc_val, 'return_code', None) - if (context.debug - or context.verbosity > 2 - or (not isinstance(exc_val, DryRunExit) and context.verbosity > 0)): + + rc = getattr(exc_val, "return_code", None) + if ( + context.debug + or context.verbosity > 2 + or (not isinstance(exc_val, DryRunExit) and context.verbosity > 0) + ): print(_format_exc(exc_val, exc_tb), file=sys.stderr) elif context.json: if isinstance(exc_val, DryRunExit): return - logger = getLogger('conda.stdout' if rc else 'conda.stderr') - exc_json = json.dumps(exc_val.dump_map(), indent=2, sort_keys=True, cls=EntityEncoder) + logger = getLogger("conda.stdout" if rc else "conda.stderr") + exc_json = json.dumps( + exc_val.dump_map(), indent=2, sort_keys=True, cls=EntityEncoder + ) logger.info("%s\n" % exc_json) else: - stderrlog = getLogger('conda.stderr') + stderrlog = getLogger("conda.stderr") stderrlog.error("\n%r\n", exc_val) # An alternative which would allow us not to reload sys with newly setdefaultencoding() # is to not use `%r`, e.g.: @@ -1122,11 +1275,10 @@ def _format_exc(exc_val=None, exc_tb=None): formatted_exception = format_exception(exc_type, exc_val, exc_tb) else: formatted_exception = format_exception_only(exc_type, exc_val) - return ''.join(formatted_exception) + return "".join(formatted_exception) class ExceptionHandler: - def __call__(self, func, *args, **kwargs): try: return func(*args, **kwargs) @@ -1144,16 +1296,19 @@ def write_out(self, *content): @property def http_timeout(self): from .base.context import context + return context.remote_connect_timeout_secs, context.remote_read_timeout_secs @property def user_agent(self): from .base.context import context + return context.user_agent @property def error_upload_url(self): from .base.context import context + return context.error_upload_url def handle_exception(self, exc_val, exc_tb): @@ -1163,8 +1318,10 @@ def handle_exception(self, exc_val, exc_tb): else: return self.handle_application_exception(exc_val, exc_tb) if isinstance(exc_val, EnvironmentError): - if getattr(exc_val, 'errno', None) == ENOSPC: - return self.handle_application_exception(NoSpaceLeftError(exc_val), exc_tb) + if getattr(exc_val, "errno", None) == ENOSPC: + return self.handle_application_exception( + NoSpaceLeftError(exc_val), exc_tb + ) if isinstance(exc_val, MemoryError): return self.handle_application_exception(CondaMemoryError(exc_val), exc_tb) if 
isinstance(exc_val, KeyboardInterrupt): @@ -1185,12 +1342,13 @@ def handle_unexpected_exception(self, exc_val, exc_tb): error_report = self.get_error_report(exc_val, exc_tb) self.print_unexpected_error_report(error_report) self._upload(error_report) - rc = getattr(exc_val, 'return_code', None) + rc = getattr(exc_val, "return_code", None) return rc if rc is not None else 1 def handle_reportable_application_exception(self, exc_val, exc_tb): error_report = self.get_error_report(exc_val, exc_tb) from .base.context import context + if context.json: error_report.update(exc_val.dump_map()) self.print_expected_error_report(error_report) @@ -1198,102 +1356,120 @@ def handle_reportable_application_exception(self, exc_val, exc_tb): return exc_val.return_code def get_error_report(self, exc_val, exc_tb): - command = ' '.join(ensure_text_type(s) for s in sys.argv) + command = " ".join(ensure_text_type(s) for s in sys.argv) info_dict = {} - if ' info' not in command: + if " info" not in command: # get info_dict, but if we get an exception here too, record it without trampling # the original exception try: from .cli.main_info import get_info_dict + info_dict = get_info_dict() except Exception as info_e: info_traceback = _format_exc() info_dict = { - 'error': repr(info_e), - 'exception_name': info_e.__class__.__name__, - 'exception_type': str(exc_val.__class__), - 'traceback': info_traceback, + "error": repr(info_e), + "exception_name": info_e.__class__.__name__, + "exception_type": str(exc_val.__class__), + "traceback": info_traceback, } error_report = { - 'error': repr(exc_val), - 'exception_name': exc_val.__class__.__name__, - 'exception_type': str(exc_val.__class__), - 'command': command, - 'traceback': _format_exc(exc_val, exc_tb), - 'conda_info': info_dict, + "error": repr(exc_val), + "exception_name": exc_val.__class__.__name__, + "exception_type": str(exc_val.__class__), + "command": command, + "traceback": _format_exc(exc_val, exc_tb), + "conda_info": info_dict, } if isinstance(exc_val, CondaError): - error_report['conda_error_components'] = exc_val.dump_map() + error_report["conda_error_components"] = exc_val.dump_map() return error_report def print_unexpected_error_report(self, error_report): from .base.context import context + if context.json: from .cli.common import stdout_json + stdout_json(error_report) else: message_builder = [] - message_builder.append('') - message_builder.append('# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<') - message_builder.append('') - message_builder.extend(' ' + line - for line in error_report['traceback'].splitlines()) - message_builder.append('') - message_builder.append('`$ %s`' % error_report['command']) - message_builder.append('') - if error_report['conda_info']: + message_builder.append("") + message_builder.append( + "# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<" + ) + message_builder.append("") + message_builder.extend( + " " + line for line in error_report["traceback"].splitlines() + ) + message_builder.append("") + message_builder.append("`$ %s`" % error_report["command"]) + message_builder.append("") + if error_report["conda_info"]: from .cli.main_info import get_env_vars_str, get_main_info_str + try: # TODO: Sanitize env vars to remove secrets (e.g credentials for PROXY) - message_builder.append(get_env_vars_str(error_report['conda_info'])) - message_builder.append(get_main_info_str(error_report['conda_info'])) + message_builder.append(get_env_vars_str(error_report["conda_info"])) + message_builder.append( + 
get_main_info_str(error_report["conda_info"]) + ) except Exception as e: log.warn("%r", e, exc_info=True) - message_builder.append('conda info could not be constructed.') - message_builder.append('%r' % e) - message_builder.append('') + message_builder.append("conda info could not be constructed.") + message_builder.append("%r" % e) + message_builder.append("") message_builder.append( "An unexpected error has occurred. Conda has prepared the above report." ) - message_builder.append('') + message_builder.append("") self.write_out(*message_builder) def print_expected_error_report(self, error_report): from .base.context import context + if context.json: from .cli.common import stdout_json + stdout_json(error_report) else: message_builder = [] - message_builder.append('') - message_builder.append('# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<') - message_builder.append('') - message_builder.append('`$ %s`' % error_report['command']) - message_builder.append('') - if error_report['conda_info']: + message_builder.append("") + message_builder.append( + "# >>>>>>>>>>>>>>>>>>>>>> ERROR REPORT <<<<<<<<<<<<<<<<<<<<<<" + ) + message_builder.append("") + message_builder.append("`$ %s`" % error_report["command"]) + message_builder.append("") + if error_report["conda_info"]: from .cli.main_info import get_env_vars_str, get_main_info_str + try: # TODO: Sanitize env vars to remove secrets (e.g credentials for PROXY) - message_builder.append(get_env_vars_str(error_report['conda_info'])) - message_builder.append(get_main_info_str(error_report['conda_info'])) + message_builder.append(get_env_vars_str(error_report["conda_info"])) + message_builder.append( + get_main_info_str(error_report["conda_info"]) + ) except Exception as e: log.warn("%r", e, exc_info=True) - message_builder.append('conda info could not be constructed.') - message_builder.append('%r' % e) - message_builder.append('') - message_builder.append('V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V') - message_builder.append('') + message_builder.append("conda info could not be constructed.") + message_builder.append("%r" % e) + message_builder.append("") + message_builder.append( + "V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V V" + ) + message_builder.append("") - message_builder.extend(error_report['error'].splitlines()) - message_builder.append('') + message_builder.extend(error_report["error"].splitlines()) + message_builder.append("") message_builder.append( "A reportable application error has occurred. Conda has prepared the above report." 
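# --- Illustrative sketch, not part of this patch: get_error_report() above
# assembles a plain dict (error repr, exception name/type, the argv command,
# a formatted traceback, and `conda info` output) that both the JSON path
# (stdout_json) and the human-readable message_builder path consume.
# `error_report_for` below is a hypothetical standalone distillation.
import sys
import traceback


def error_report_for(exc):
    return {
        "error": repr(exc),
        "exception_name": type(exc).__name__,
        "exception_type": str(type(exc)),
        "command": " ".join(sys.argv),
        "traceback": "".join(
            traceback.format_exception(type(exc), exc, exc.__traceback__)
        ),
    }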
) - message_builder.append('') + message_builder.append("") self.write_out(*message_builder) # FUTURE: Python 3.8+, replace with functools.cached_property @@ -1357,13 +1533,15 @@ def _ask_upload(self): def _execute_upload(self, error_report): headers = { - 'User-Agent': self.user_agent, + "User-Agent": self.user_agent, } _timeout = self.http_timeout username = getpass.getuser() - error_report['is_ascii'] = True if all(ord(c) < 128 for c in username) else False - error_report['has_spaces'] = True if " " in str(username) else False - data = json.dumps(error_report, sort_keys=True, cls=EntityEncoder) + '\n' + error_report["is_ascii"] = ( + True if all(ord(c) < 128 for c in username) else False + ) + error_report["has_spaces"] = True if " " in str(username) else False + data = json.dumps(error_report, sort_keys=True, cls=EntityEncoder) + "\n" data = data.replace(str(username), "USERNAME_REMOVED") response = None try: @@ -1371,22 +1549,31 @@ def _execute_upload(self, error_report): # That is, when following a 301 or 302, it turns a POST into a GET. # And no way to disable. WTF import requests + redirect_counter = 0 url = self.error_upload_url - response = requests.post(url, headers=headers, timeout=_timeout, data=data, - allow_redirects=False) + response = requests.post( + url, headers=headers, timeout=_timeout, data=data, allow_redirects=False + ) response.raise_for_status() - while response.status_code in (301, 302) and response.headers.get('Location'): - url = response.headers['Location'] - response = requests.post(url, headers=headers, timeout=_timeout, data=data, - allow_redirects=False) + while response.status_code in (301, 302) and response.headers.get( + "Location" + ): + url = response.headers["Location"] + response = requests.post( + url, + headers=headers, + timeout=_timeout, + data=data, + allow_redirects=False, + ) response.raise_for_status() redirect_counter += 1 if redirect_counter > 15: raise CondaError("Redirect limit exceeded") log.debug("upload response status: %s", response and response.status_code) except Exception as e: # pragma: no cover - log.info('%r', e) + log.info("%r", e) try: if response and response.ok: self.write_out("Upload successful.") diff --git a/conda/exports.py b/conda/exports.py index a0df9fcc8cf..88eb517bfbc 100644 --- a/conda/exports.py +++ b/conda/exports.py @@ -1,35 +1,36 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from collections.abc import Hashable as _Hashable import errno import functools import os import sys import threading +from builtins import input # noqa: F401 +from collections.abc import Hashable as _Hashable # necessary for conda-build from io import StringIO # noqa: F401 -from builtins import input # noqa: F401 from . import CondaError # noqa: F401 -from .deprecations import deprecated from .base.context import reset_context +from .deprecations import deprecated reset_context() # initialize context when conda.exports is imported from . 
import plan # noqa: F401 -from .core.solve import Solver # noqa: F401 -from .cli.common import specs_from_args, spec_from_line, specs_from_url # noqa: F401 -from .cli.conda_argparse import add_parser_prefix, add_parser_channels # noqa: F401 +from .cli.common import spec_from_line, specs_from_args, specs_from_url # noqa: F401 from .cli.conda_argparse import ArgumentParser # noqa: F401 +from .cli.conda_argparse import add_parser_channels, add_parser_prefix # noqa: F401 from .common import compat # noqa: F401 from .common.compat import on_win # noqa: F401 +from .common.toposort import _toposort # noqa: F401 +from .core.solve import Solver # noqa: F401 +from .gateways.connection.download import TmpDownload # noqa: F401 +from .gateways.connection.download import download as _download # noqa: F401 from .gateways.connection.session import CondaSession # noqa: F401 from .gateways.disk.create import TemporaryDirectory # noqa: F401 -from .common.toposort import _toposort # noqa: F401 from .gateways.disk.link import lchmod # noqa: F401 -from .gateways.connection.download import TmpDownload, download as _download # noqa: F401 + @deprecated("23.3", "23.9", addendum="Handled by CondaSession.") def handle_proxy_407(x, y): @@ -39,26 +40,42 @@ def handle_proxy_407(x, y): from .core.package_cache_data import rm_fetched # noqa: F401 from .gateways.disk.delete import delete_trash, move_to_trash # noqa: F401 from .misc import untracked, walk_prefix # noqa: F401 -from .resolve import MatchSpec, ResolvePackageNotFound, Resolve, Unsatisfiable # noqa: F401 +from .resolve import ( # noqa: F401 + MatchSpec, + Resolve, + ResolvePackageNotFound, + Unsatisfiable, +) NoPackagesFound = NoPackagesFoundError = ResolvePackageNotFound -from .utils import hashsum_file, md5_file, human_bytes, unix_path_to_win, url_path # noqa: F401 +import conda.base.context + +from .base.context import ( # noqa: F401 + get_prefix, + non_x86_machines, + reset_context, + sys_rc_path, +) from .common.path import win_path_to_unix # noqa: F401 from .gateways.disk.read import compute_md5sum # noqa: F401 - -from .models.version import VersionOrder, normalized_version # noqa: F401 from .models.channel import Channel # noqa: F401 -import conda.base.context -from .base.context import get_prefix, non_x86_machines, reset_context, sys_rc_path # noqa: F401 +from .models.version import VersionOrder, normalized_version # noqa: F401 +from .utils import ( # noqa: F401 + hashsum_file, + human_bytes, + md5_file, + unix_path_to_win, + url_path, +) non_x86_linux_machines = non_x86_machines from .auxlib.entity import EntityEncoder # noqa: F401 from .base.constants import ( # noqa: F401 DEFAULT_CHANNELS, - DEFAULT_CHANNELS_WIN, DEFAULT_CHANNELS_UNIX, + DEFAULT_CHANNELS_WIN, ) get_default_urls = lambda: DEFAULT_CHANNELS @@ -86,7 +103,7 @@ def handle_proxy_407(x, y): get_local_urls = lambda: list(get_conda_build_local_url()) or [] load_condarc = lambda fn: conda.base.context.reset_context([fn]) -from .exceptions import PaddingError, LinkError, CondaOSError, PathNotFoundError # NOQA +from .exceptions import CondaOSError, LinkError, PaddingError, PathNotFoundError # NOQA PaddingError = PaddingError LinkError = LinkError @@ -100,11 +117,11 @@ def handle_proxy_407(x, y): IndexRecord = PackageRecord -from .models.dist import Dist -from .gateways.subprocess import ACTIVE_SUBPROCESSES, subprocess_call # noqa: F401 -from .core.subdir_data import cache_fn_url # noqa: F401 from .core.package_cache_data import ProgressiveFetchExtract # noqa: F401 +from .core.subdir_data import 
cache_fn_url # noqa: F401 from .exceptions import CondaHTTPError, LockError, UnsatisfiableError # noqa: F401 +from .gateways.subprocess import ACTIVE_SUBPROCESSES, subprocess_call # noqa: F401 +from .models.dist import Dist # Replacements for six exports for compatibility PY3 = True # noqa: F401 @@ -137,6 +154,7 @@ class memoized: # pragma: no cover If called later with the same arguments, the cached value is returned (not reevaluated). """ + def __init__(self, func): self.func = func self.cache = {} @@ -164,8 +182,8 @@ def __call__(self, *args, **kw): return value -from .gateways.disk.delete import rm_rf as _rm_rf from .core.prefix_data import delete_prefix_from_linked_data +from .gateways.disk.delete import rm_rf as _rm_rf def rm_rf(path, max_retries=5, trash=True): @@ -188,36 +206,47 @@ def verify(_): return False # pragma: no cover +from .plan import display_actions as _display_actions from .plan import ( # noqa: F401 execute_actions, execute_instructions, execute_plan, install_actions, ) -from .plan import display_actions as _display_actions -def display_actions(actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=()): - if 'FETCH' in actions: - actions['FETCH'] = [index[d] for d in actions['FETCH']] - if 'LINK' in actions: - actions['LINK'] = [index[d] for d in actions['LINK']] - if 'UNLINK' in actions: - actions['UNLINK'] = [index[d] for d in actions['UNLINK']] +def display_actions( + actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=() +): + if "FETCH" in actions: + actions["FETCH"] = [index[d] for d in actions["FETCH"]] + if "LINK" in actions: + actions["LINK"] = [index[d] for d in actions["LINK"]] + if "UNLINK" in actions: + actions["UNLINK"] = [index[d] for d in actions["UNLINK"]] index = {prec: prec for prec in index.values()} - return _display_actions(actions, index, show_channel_urls, specs_to_remove, specs_to_add) - - -from .core.index import ( # noqa: F401 - dist_str_in_index, - fetch_index as _fetch_index, - get_index as _get_index, -) - - -def get_index(channel_urls=(), prepend=True, platform=None, - use_local=False, use_cache=False, unknown=None, prefix=None): - index = _get_index(channel_urls, prepend, platform, use_local, use_cache, unknown, prefix) + return _display_actions( + actions, index, show_channel_urls, specs_to_remove, specs_to_add + ) + + +from .core.index import dist_str_in_index # noqa: F401 +from .core.index import fetch_index as _fetch_index # noqa: F401 +from .core.index import get_index as _get_index + + +def get_index( + channel_urls=(), + prepend=True, + platform=None, + use_local=False, + use_cache=False, + unknown=None, + prefix=None, +): + index = _get_index( + channel_urls, prepend, platform, use_local, use_cache, unknown, prefix + ) return {Dist(prec): prec for prec in index.values()} @@ -230,9 +259,10 @@ def package_cache(): from .core.package_cache_data import PackageCacheData class package_cache: - def __contains__(self, dist): - return bool(PackageCacheData.first_writable().get(Dist(dist).to_package_ref(), None)) + return bool( + PackageCacheData.first_writable().get(Dist(dist).to_package_ref(), None) + ) def keys(self): return (Dist(v) for v in PackageCacheData.first_writable().values()) @@ -247,13 +277,15 @@ def symlink_conda(prefix, root_dir, shell=None): # pragma: no cover print("WARNING: symlink_conda() is deprecated.", file=sys.stderr) # do not symlink root env - this clobbers activate incorrectly. # prefix should always be longer than, or outside the root dir. 
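# --- Illustrative sketch, not part of this patch: the `memoized` class kept
# above for conda-build compatibility is classic argument-keyed caching.
# For hashable arguments, functools gives the same behavior:
import functools


@functools.lru_cache(maxsize=None)
def expensive(x, y):
    # evaluated once per distinct (x, y); later calls return the cached value
    return x**y


assert expensive(2, 10) == expensive(2, 10) == 1024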
- if os.path.normcase(os.path.normpath(prefix)) in os.path.normcase(os.path.normpath(root_dir)): + if os.path.normcase(os.path.normpath(prefix)) in os.path.normcase( + os.path.normpath(root_dir) + ): return if on_win: - where = 'condabin' + where = "condabin" symlink_fn = functools.partial(win_conda_bat_redirect, shell=shell) else: - where = 'bin' + where = "bin" symlink_fn = os.symlink if not os.path.isdir(os.path.join(prefix, where)): os.makedirs(os.path.join(prefix, where)) @@ -276,9 +308,9 @@ def _symlink_conda_hlp(prefix, root_dir, where, symlink_fn): # pragma: no cover if not os.path.lexists(prefix_file): symlink_fn(root_file, prefix_file) except OSError as e: - if (os.path.lexists(prefix_file) and (e.errno in ( - errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST - ))): + if os.path.lexists(prefix_file) and ( + e.errno in (errno.EPERM, errno.EACCES, errno.EROFS, errno.EEXIST) + ): # Cannot symlink root_file to prefix_file. Ignoring since link already exists pass else: @@ -286,6 +318,7 @@ def _symlink_conda_hlp(prefix, root_dir, where, symlink_fn): # pragma: no cover if on_win: # pragma: no cover + def win_conda_bat_redirect(src, dst, shell): """Special function for Windows XP where the `CreateSymbolicLink` function is not available. @@ -296,6 +329,7 @@ def win_conda_bat_redirect(src, dst, shell): Works of course only with callable files, e.g. `.bat` or `.exe` files. """ from .utils import shells + try: os.makedirs(os.path.dirname(dst)) except OSError as exc: # Python >2.5 @@ -305,8 +339,8 @@ def win_conda_bat_redirect(src, dst, shell): raise # bat file redirect - if not os.path.isfile(dst + '.bat'): - with open(dst + '.bat', 'w') as f: + if not os.path.isfile(dst + ".bat"): + with open(dst + ".bat", "w") as f: f.write('@echo off\ncall "%s" %%*\n' % src) # TODO: probably need one here for powershell at some point @@ -321,13 +355,13 @@ def win_conda_bat_redirect(src, dst, shell): with open(dst, "w") as f: f.write("#!/usr/bin/env bash \n") if src.endswith("conda"): - f.write('%s "$@"' % shells[shell]['path_to'](src+".exe")) + f.write('%s "$@"' % shells[shell]["path_to"](src + ".exe")) else: - f.write('source %s "$@"' % shells[shell]['path_to'](src)) + f.write('source %s "$@"' % shells[shell]["path_to"](src)) # Make the new file executable # http://stackoverflow.com/a/30463972/1170370 mode = os.stat(dst).st_mode - mode |= (mode & 292) >> 2 # copy R bits to X + mode |= (mode & 292) >> 2 # copy R bits to X os.chmod(dst, mode) @@ -337,8 +371,12 @@ def linked_data(prefix, ignore_channels=False): """ from .core.prefix_data import PrefixData from .models.dist import Dist + pd = PrefixData(prefix) - return {Dist(prefix_record): prefix_record for prefix_record in pd._prefix_records.values()} + return { + Dist(prefix_record): prefix_record + for prefix_record in pd._prefix_records.values() + } def linked(prefix, ignore_channels=False): @@ -346,9 +384,14 @@ def linked(prefix, ignore_channels=False): Return the Dists of linked packages in prefix. 
""" from .models.enums import PackageType + conda_package_types = PackageType.conda_package_types() ld = linked_data(prefix, ignore_channels=ignore_channels).items() - return {dist for dist, prefix_rec in ld if prefix_rec.package_type in conda_package_types} + return { + dist + for dist, prefix_rec in ld + if prefix_rec.package_type in conda_package_types + } # exports @@ -359,6 +402,7 @@ def is_linked(prefix, dist): """ # FIXME Functions that begin with `is_` should return True/False from .core.prefix_data import PrefixData + pd = PrefixData(prefix) prefix_record = pd.get(dist.name, None) if prefix_record is None: @@ -369,6 +413,14 @@ def is_linked(prefix, dist): return None -def download(url, dst_path, session=None, md5sum=None, urlstxt=False, retries=3, - sha256=None, size=None): +def download( + url, + dst_path, + session=None, + md5sum=None, + urlstxt=False, + retries=3, + sha256=None, + size=None, +): return _download(url, dst_path, md5=md5sum, sha256=sha256, size=size) diff --git a/conda/gateways/anaconda_client.py b/conda/gateways/anaconda_client.py index c54bb5945ab..5bcd35ae357 100644 --- a/conda/gateways/anaconda_client.py +++ b/conda/gateways/anaconda_client.py @@ -1,22 +1,21 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import os import re from logging import getLogger from os.path import isdir, isfile, join from stat import S_IREAD, S_IWRITE -from .disk.delete import rm_rf from .._vendor.appdirs import AppDirs from ..common.url import quote_plus, unquote_plus +from .disk.delete import rm_rf log = getLogger(__name__) def replace_first_api_with_conda(url): # replace first occurrence of 'api' with 'conda' in url - return re.sub(r'([./])api([./]|$)', r'\1conda\2', url, count=1) + return re.sub(r"([./])api([./]|$)", r"\1conda\2", url, count=1) class EnvAppDirs: @@ -44,9 +43,11 @@ def user_log_dir(self): def _get_binstar_token_directory(): if "BINSTAR_CONFIG_DIR" in os.environ: - return EnvAppDirs("binstar", "ContinuumIO", os.environ["BINSTAR_CONFIG_DIR"]).user_data_dir + return EnvAppDirs( + "binstar", "ContinuumIO", os.environ["BINSTAR_CONFIG_DIR"] + ).user_data_dir else: - return AppDirs('binstar', 'ContinuumIO').user_data_dir + return AppDirs("binstar", "ContinuumIO").user_data_dir def read_binstar_tokens(): @@ -58,7 +59,7 @@ def read_binstar_tokens(): for tkn_entry in os.scandir(token_dir): if tkn_entry.name[-6:] != ".token": continue - url = re.sub(r'\.token$', '', unquote_plus(tkn_entry.name)) + url = re.sub(r"\.token$", "", unquote_plus(tkn_entry.name)) with open(tkn_entry.path) as f: token = f.read() tokens[url] = tokens[replace_first_api_with_conda(url)] = token @@ -70,18 +71,18 @@ def set_binstar_token(url, token): if not isdir(token_dir): os.makedirs(token_dir) - tokenfile = join(token_dir, '%s.token' % quote_plus(url)) + tokenfile = join(token_dir, "%s.token" % quote_plus(url)) if isfile(tokenfile): os.unlink(tokenfile) - with open(tokenfile, 'w') as fd: + with open(tokenfile, "w") as fd: fd.write(token) os.chmod(tokenfile, S_IWRITE | S_IREAD) def remove_binstar_token(url): token_dir = _get_binstar_token_directory() - tokenfile = join(token_dir, '%s.token' % quote_plus(url)) + tokenfile = join(token_dir, "%s.token" % quote_plus(url)) rm_rf(tokenfile) diff --git a/conda/gateways/connection/__init__.py b/conda/gateways/connection/__init__.py index 130421d6910..cde75008085 100644 --- a/conda/gateways/connection/__init__.py +++ b/conda/gateways/connection/__init__.py @@ -1,42 +1,34 @@ # Copyright (C) 2012 Anaconda, Inc # 
SPDX-License-Identifier: BSD-3-Clause - - try: from requests import ConnectionError, HTTPError, Session from requests.adapters import BaseAdapter, HTTPAdapter from requests.auth import AuthBase, _basic_auth_str from requests.cookies import extract_cookies_to_jar - from requests.exceptions import ( - ChunkedEncodingError, - InvalidSchema, - SSLError, - ProxyError as RequestsProxyError, - ) + from requests.exceptions import ChunkedEncodingError, InvalidSchema + from requests.exceptions import ProxyError as RequestsProxyError + from requests.exceptions import SSLError from requests.hooks import dispatch_hook from requests.models import Response from requests.packages.urllib3.exceptions import InsecureRequestWarning + from requests.packages.urllib3.util.retry import Retry from requests.structures import CaseInsensitiveDict from requests.utils import get_auth_from_url, get_netrc_auth - from requests.packages.urllib3.util.retry import Retry except ImportError: # pragma: no cover from pip._vendor.requests import ConnectionError, HTTPError, Session from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter from pip._vendor.requests.auth import AuthBase, _basic_auth_str from pip._vendor.requests.cookies import extract_cookies_to_jar - from pip._vendor.requests.exceptions import ( - ChunkedEncodingError, - InvalidSchema, - SSLError, - ProxyError as RequestsProxyError, - ) + from pip._vendor.requests.exceptions import ChunkedEncodingError, InvalidSchema + from pip._vendor.requests.exceptions import ProxyError as RequestsProxyError + from pip._vendor.requests.exceptions import SSLError from pip._vendor.requests.hooks import dispatch_hook from pip._vendor.requests.models import Response from pip._vendor.requests.packages.urllib3.exceptions import InsecureRequestWarning + from pip._vendor.requests.packages.urllib3.util.retry import Retry from pip._vendor.requests.structures import CaseInsensitiveDict from pip._vendor.requests.utils import get_auth_from_url, get_netrc_auth - from pip._vendor.requests.packages.urllib3.util.retry import Retry dispatch_hook = dispatch_hook diff --git a/conda/gateways/connection/adapters/ftp.py b/conda/gateways/connection/adapters/ftp.py index 8d0186e998a..d000988d32d 100644 --- a/conda/gateways/connection/adapters/ftp.py +++ b/conda/gateways/connection/adapters/ftp.py @@ -17,16 +17,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from base64 import b64decode import cgi import ftplib +import os +from base64 import b64decode from io import BytesIO, StringIO from logging import getLogger -import os -from .. import BaseAdapter, Response, dispatch_hook from ....common.url import urlparse from ....exceptions import AuthenticationError +from .. import BaseAdapter, Response, dispatch_hook log = getLogger(__name__) @@ -34,6 +34,8 @@ # After: https://stackoverflow.com/a/44073062/3257826 # And: https://stackoverflow.com/a/35368154/3257826 _old_makepasv = ftplib.FTP.makepasv + + def _new_makepasv(self): host, port = _old_makepasv(self) host = self.sock.getpeername()[0] @@ -45,17 +47,20 @@ def _new_makepasv(self): class FTPAdapter(BaseAdapter): """A Requests Transport Adapter that handles FTP urls.""" + def __init__(self): super().__init__() # Build a dictionary keyed off the methods we support in upper case. # The values of this dictionary should be the functions we use to # send the specific queries. 
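# --- Illustrative sketch, not part of this patch: the ftplib monkeypatch
# above replaces FTP.makepasv so the data connection reuses the control
# connection's peer address, working around FTP servers behind NAT that
# advertise an unreachable internal IP in their PASV reply (see the two
# stackoverflow links in the source). Minimal standalone form:
import ftplib

_orig_makepasv = ftplib.FTP.makepasv


def _patched_makepasv(self):
    host, port = _orig_makepasv(self)
    # trust the address we actually dialed, not the one the server reports
    return self.sock.getpeername()[0], port


ftplib.FTP.makepasv = _patched_makepasv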
- self.func_table = {'LIST': self.list, - 'RETR': self.retr, - 'STOR': self.stor, - 'NLST': self.nlst, - 'GET': self.retr} + self.func_table = { + "LIST": self.list, + "RETR": self.retr, + "STOR": self.stor, + "NLST": self.nlst, + "GET": self.retr, + } def send(self, request, **kwargs): """Sends a PreparedRequest object over FTP. Returns a response object.""" @@ -66,7 +71,7 @@ def send(self, request, **kwargs): host, port, path = self.get_host_and_path_from_url(request) # Sort out the timeout. - timeout = kwargs.get('timeout', None) + timeout = kwargs.get("timeout", None) if not isinstance(timeout, int): # https://github.com/conda/conda/pull/3392 timeout = 10 @@ -101,7 +106,7 @@ def list(self, path, request): data.release_conn = data.close self.conn.cwd(path) - code = self.conn.retrbinary('LIST', data_callback_factory(data)) + code = self.conn.retrbinary("LIST", data_callback_factory(data)) # When that call has finished executing, we'll have all our data. response = build_text_response(request, data, code) @@ -119,7 +124,7 @@ def retr(self, path, request): # method. See self.list(). data.release_conn = data.close - code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data)) + code = self.conn.retrbinary("RETR " + path, data_callback_factory(data)) response = build_binary_response(request, data, code) @@ -143,7 +148,7 @@ def stor(self, path, request): # Switch directories and upload the data. self.conn.cwd(path) - code = self.conn.storbinary('STOR ' + filename, data) + code = self.conn.storbinary("STOR " + filename, data) # Close the connection and build the response. self.conn.close() @@ -160,7 +165,7 @@ def nlst(self, path, request): data.release_conn = data.close self.conn.cwd(path) - code = self.conn.retrbinary('NLST', data_callback_factory(data)) + code = self.conn.retrbinary("NLST", data_callback_factory(data)) # When that call has finished executing, we'll have all our data. response = build_text_response(request, data, code) @@ -175,14 +180,14 @@ def get_username_password_from_header(self, request): Basic auth to obtain the username and password. Allows the FTP adapter to piggyback on the basic auth notation without changing the control flow.""" - auth_header = request.headers.get('Authorization') + auth_header = request.headers.get("Authorization") if auth_header: # The basic auth header is of the form 'Basic xyz'. We want the # second part. Check that we have the right kind of auth though. encoded_components = auth_header.split()[:2] - if encoded_components[0] != 'Basic': - raise AuthenticationError('Invalid form of Authentication used.') + if encoded_components[0] != "Basic": + raise AuthenticationError("Invalid form of Authentication used.") else: encoded = encoded_components[1] @@ -191,7 +196,7 @@ def get_username_password_from_header(self, request): # The string is of the form 'username:password'. Split on the # colon. - components = decoded.split(':') + components = decoded.split(":") username = components[0] password = components[1] return (username, password) @@ -208,7 +213,7 @@ def get_host_and_path_from_url(self, request): path = parsed.path # If there is a slash on the front of the path, chuck it. - if path[0] == '/': + if path[0] == "/": path = path[1:] host = parsed.hostname @@ -221,6 +226,7 @@ def data_callback_factory(variable): """Returns a callback suitable for use by the FTP library. This callback will repeatedly save data into the variable provided to this function. 
This variable should be a file-like structure.""" + def callback(data): variable.write(data) @@ -229,12 +235,12 @@ def callback(data): def build_text_response(request, data, code): """Build a response for textual data.""" - return build_response(request, data, code, 'ascii') + return build_response(request, data, code, "ascii") def build_binary_response(request, data, code): """Build a response for data whose encoding is unknown.""" - return build_response(request, data, code, None) + return build_response(request, data, code, None) def build_response(request, data, code, encoding): @@ -254,7 +260,7 @@ def build_response(request, data, code, encoding): response.raw.seek(0) # Run the response hook. - response = dispatch_hook('response', request.hooks, response) + response = dispatch_hook("response", request.hooks, response) return response @@ -262,7 +268,7 @@ def parse_multipart_files(request): """Given a prepared request, return a file-like object containing the original data. This is pretty hacky.""" # Start by grabbing the pdict. - _, pdict = cgi.parse_header(request.headers['Content-Type']) + _, pdict = cgi.parse_header(request.headers["Content-Type"]) # Now, wrap the multipart data in a BytesIO buffer. This is annoying. buf = BytesIO() @@ -276,7 +282,7 @@ def parse_multipart_files(request): # Get a BytesIO now, and write the file into it. buf = BytesIO() - buf.write(''.join(filedata)) + buf.write("".join(filedata)) buf.seek(0) return buf @@ -305,15 +311,15 @@ def get_status_code_from_code_response(code): immediately by Space , optionally some text, and the Telnet end-of-line code." """ - last_valid_line_from_code = [line for line in code.split('\n') if line][-1] + last_valid_line_from_code = [line for line in code.split("\n") if line][-1] status_code_from_last_line = int(last_valid_line_from_code.split()[0]) status_code_from_first_digits = int(code[:3]) if status_code_from_last_line != status_code_from_first_digits: log.warning( - 'FTP response status code seems to be inconsistent.\n' - 'Code received: %s, extracted: %s and %s', + "FTP response status code seems to be inconsistent.\n" + "Code received: %s, extracted: %s and %s", code, status_code_from_last_line, - status_code_from_first_digits + status_code_from_first_digits, ) return status_code_from_last_line diff --git a/conda/gateways/connection/adapters/localfs.py b/conda/gateways/connection/adapters/localfs.py index 08a1b809fce..b6cd52d519e 100644 --- a/conda/gateways/connection/adapters/localfs.py +++ b/conda/gateways/connection/adapters/localfs.py @@ -1,23 +1,23 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from email.utils import formatdate import json +from email.utils import formatdate from logging import getLogger from mimetypes import guess_type from os import stat from tempfile import SpooledTemporaryFile -from .. import BaseAdapter, CaseInsensitiveDict, Response from ....common.compat import ensure_binary from ....common.path import url_to_path +from .. 
import BaseAdapter, CaseInsensitiveDict, Response log = getLogger(__name__) class LocalFSAdapter(BaseAdapter): - - def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None): + def send( + self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None + ): pathname = url_to_path(request.url) resp = Response() @@ -41,11 +41,13 @@ def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxi else: modified = formatdate(stats.st_mtime, usegmt=True) content_type = guess_type(pathname)[0] or "text/plain" - resp.headers = CaseInsensitiveDict({ - "Content-Type": content_type, - "Content-Length": stats.st_size, - "Last-Modified": modified, - }) + resp.headers = CaseInsensitiveDict( + { + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + } + ) resp.raw = open(pathname, "rb") resp.close = resp.raw.close diff --git a/conda/gateways/connection/adapters/s3.py b/conda/gateways/connection/adapters/s3.py index de7be4f1ea0..8a7b67bc5e2 100644 --- a/conda/gateways/connection/adapters/s3.py +++ b/conda/gateways/connection/adapters/s3.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json from logging import LoggerAdapter, getLogger from tempfile import SpooledTemporaryFile @@ -8,28 +7,31 @@ have_boto3 = have_boto = False try: import boto3 + have_boto3 = True except ImportError: try: import boto + have_boto = True except ImportError: pass -from .. import BaseAdapter, CaseInsensitiveDict, Response from ....common.compat import ensure_binary from ....common.url import url_to_s3_info +from .. import BaseAdapter, CaseInsensitiveDict, Response log = getLogger(__name__) -stderrlog = LoggerAdapter(getLogger('conda.stderrlog'), extra=dict(terminator="\n")) +stderrlog = LoggerAdapter(getLogger("conda.stderrlog"), extra=dict(terminator="\n")) class S3Adapter(BaseAdapter): - def __init__(self): super().__init__() - def send(self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None): + def send( + self, request, stream=None, timeout=None, verify=None, cert=None, proxies=None + ): resp = Response() resp.status_code = 200 resp.url = request.url @@ -52,12 +54,13 @@ def close(self): def _send_boto3(self, boto3, resp, request): from botocore.exceptions import BotoCoreError, ClientError + bucket_name, key_string = url_to_s3_info(request.url) # https://github.com/conda/conda/issues/8993 # creating a separate boto3 session to make this thread safe session = boto3.session.Session() # create a resource client using this thread's session object - s3 = session.resource('s3') + s3 = session.resource("s3") # finally get the S3 object key = s3.Object(bucket_name, key_string[1:]) @@ -70,16 +73,20 @@ def _send_boto3(self, boto3, resp, request): "path": request.url, "exception": repr(e), } - resp.raw = self._write_tempfile(lambda x: x.write(ensure_binary(json.dumps(message)))) + resp.raw = self._write_tempfile( + lambda x: x.write(ensure_binary(json.dumps(message))) + ) resp.close = resp.raw.close return resp - key_headers = response['ResponseMetadata']['HTTPHeaders'] - resp.headers = CaseInsensitiveDict({ - "Content-Type": key_headers.get('content-type', "text/plain"), - "Content-Length": key_headers['content-length'], - "Last-Modified": key_headers['last-modified'], - }) + key_headers = response["ResponseMetadata"]["HTTPHeaders"] + resp.headers = CaseInsensitiveDict( + { + "Content-Type": key_headers.get("content-type", "text/plain"), + "Content-Length": 
key_headers["content-length"], + "Last-Modified": key_headers["last-modified"], + } + ) resp.raw = self._write_tempfile(key.download_fileobj) resp.close = resp.raw.close @@ -101,11 +108,13 @@ def _send_boto(self, boto, resp, request): if key and key.exists: modified = key.last_modified content_type = key.content_type or "text/plain" - resp.headers = CaseInsensitiveDict({ - "Content-Type": content_type, - "Content-Length": key.size, - "Last-Modified": modified, - }) + resp.headers = CaseInsensitiveDict( + { + "Content-Type": content_type, + "Content-Length": key.size, + "Last-Modified": modified, + } + ) resp.raw = self._write_tempfile(key.get_contents_to_file) resp.close = resp.raw.close diff --git a/conda/gateways/connection/download.py b/conda/gateways/connection/download.py index 7e9bc7c573a..620eba0a7ed 100644 --- a/conda/gateways/connection/download.py +++ b/conda/gateways/connection/download.py @@ -1,16 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import hashlib -from logging import DEBUG, getLogger -from os.path import basename, exists, join import tempfile import warnings +from logging import DEBUG, getLogger +from os.path import basename, exists, join -from . import (ConnectionError, HTTPError, InsecureRequestWarning, InvalidSchema, - SSLError, RequestsProxyError) -from .session import CondaSession -from ..disk.delete import rm_rf from ... import CondaError from ...auxlib.ish import dals from ...auxlib.logz import stringify @@ -18,24 +13,39 @@ from ...common.io import time_recorder from ...exceptions import ( BasicClobberError, + ChecksumMismatchError, CondaDependencyError, CondaHTTPError, CondaSSLError, - ChecksumMismatchError, - maybe_raise, ProxyError, + maybe_raise, +) +from ..disk.delete import rm_rf +from . 
import ( + ConnectionError, + HTTPError, + InsecureRequestWarning, + InvalidSchema, + RequestsProxyError, + SSLError, ) +from .session import CondaSession log = getLogger(__name__) def disable_ssl_verify_warning(): - warnings.simplefilter('ignore', InsecureRequestWarning) + warnings.simplefilter("ignore", InsecureRequestWarning) @time_recorder("download") def download( - url, target_full_path, md5=None, sha256=None, size=None, progress_update_callback=None + url, + target_full_path, + md5=None, + sha256=None, + size=None, + progress_update_callback=None, ): if exists(target_full_path): maybe_raise(BasicClobberError(target_full_path, url, context), context) @@ -50,7 +60,7 @@ def download( log.debug(stringify(resp, content_max_len=256)) resp.raise_for_status() - content_length = int(resp.headers.get('Content-Length', 0)) + content_length = int(resp.headers.get("Content-Length", 0)) # prefer sha256 over md5 when both are available checksum_builder = checksum_type = checksum = None @@ -65,18 +75,22 @@ def download( size_builder = 0 try: - with open(target_full_path, 'wb') as fh: + with open(target_full_path, "wb") as fh: streamed_bytes = 0 - for chunk in resp.iter_content(2 ** 14): + for chunk in resp.iter_content(2**14): # chunk could be the decompressed form of the real data # but we want the exact number of bytes read till now streamed_bytes = resp.raw.tell() try: fh.write(chunk) except OSError as e: - message = "Failed to write to %(target_path)s\n errno: %(errno)d" + message = ( + "Failed to write to %(target_path)s\n errno: %(errno)d" + ) # TODO: make this CondaIOError - raise CondaError(message, target_path=target_full_path, errno=e.errno) + raise CondaError( + message, target_path=target_full_path, errno=e.errno + ) checksum_builder and checksum_builder.update(chunk) size_builder += len(chunk) @@ -87,16 +101,22 @@ def download( if content_length and streamed_bytes != content_length: # TODO: needs to be a more-specific error type - message = dals(""" + message = dals( + """ Downloaded bytes did not match Content-Length url: %(url)s target_path: %(target_path)s Content-Length: %(content_length)d downloaded bytes: %(downloaded_bytes)d - """) - raise CondaError(message, url=url, target_path=target_full_path, - content_length=content_length, - downloaded_bytes=streamed_bytes) + """ + ) + raise CondaError( + message, + url=url, + target_path=target_full_path, + content_length=content_length, + downloaded_bytes=streamed_bytes, + ) except OSError as e: if e.errno == 104: @@ -107,28 +127,39 @@ def download( if checksum: actual_checksum = checksum_builder.hexdigest() if actual_checksum != checksum: - log.debug("%s mismatch for download: %s (%s != %s)", - checksum_type, url, actual_checksum, checksum) + log.debug( + "%s mismatch for download: %s (%s != %s)", + checksum_type, + url, + actual_checksum, + checksum, + ) raise ChecksumMismatchError( url, target_full_path, checksum_type, checksum, actual_checksum ) if size is not None: actual_size = size_builder if actual_size != size: - log.debug("size mismatch for download: %s (%s != %s)", url, actual_size, size) - raise ChecksumMismatchError(url, target_full_path, "size", size, actual_size) + log.debug( + "size mismatch for download: %s (%s != %s)", url, actual_size, size + ) + raise ChecksumMismatchError( + url, target_full_path, "size", size, actual_size + ) except RequestsProxyError: raise ProxyError() # see #3962 except InvalidSchema as e: - if 'SOCKS' in str(e): - message = dals(""" + if "SOCKS" in str(e): + message = dals( + """ Requests has 
identified that your current working environment is configured to use a SOCKS proxy, but pysocks is not installed. To proceed, remove your proxy configuration, run `conda install pysocks`, and then you can re-enable your proxy configuration. - """) + """ + ) raise CondaDependencyError(message) else: raise @@ -160,17 +191,21 @@ def download( ) except (ConnectionError, HTTPError) as e: - help_message = dals(""" + help_message = dals( + """ An HTTP error occurred when trying to retrieve this URL. HTTP errors are often intermittent, and a simple retry will get you on your way. - """) - raise CondaHTTPError(help_message, - url, - getattr(e.response, 'status_code', None), - getattr(e.response, 'reason', None), - getattr(e.response, 'elapsed', None), - e.response, - caused_by=e) + """ + ) + raise CondaHTTPError( + help_message, + url, + getattr(e.response, "status_code", None), + getattr(e.response, "reason", None), + getattr(e.response, "elapsed", None), + e.response, + caused_by=e, + ) def download_text(url): @@ -179,42 +214,52 @@ def download_text(url): try: timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs session = CondaSession() - response = session.get(url, stream=True, proxies=session.proxies, timeout=timeout) + response = session.get( + url, stream=True, proxies=session.proxies, timeout=timeout + ) if log.isEnabledFor(DEBUG): log.debug(stringify(response, content_max_len=256)) response.raise_for_status() except RequestsProxyError: raise ProxyError() # see #3962 except InvalidSchema as e: - if 'SOCKS' in str(e): - message = dals(""" + if "SOCKS" in str(e): + message = dals( + """ Requests has identified that your current working environment is configured to use a SOCKS proxy, but pysocks is not installed. To proceed, remove your proxy configuration, run `conda install pysocks`, and then you can re-enable your proxy configuration. - """) + """ + ) raise CondaDependencyError(message) else: raise except (ConnectionError, HTTPError, SSLError) as e: - status_code = getattr(e.response, 'status_code', None) + status_code = getattr(e.response, "status_code", None) if status_code == 404: - help_message = dals(""" + help_message = dals( + """ An HTTP error occurred when trying to retrieve this URL. The URL does not exist. - """) + """ + ) else: - help_message = dals(""" + help_message = dals( + """ An HTTP error occurred when trying to retrieve this URL. HTTP errors are often intermittent, and a simple retry will get you on your way. 
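# --- Illustrative sketch, not part of this patch: download() above feeds
# each streamed chunk into a hashlib builder and a byte counter, then
# compares the hexdigest (sha256 preferred over md5) and the total size
# with the expected values, raising ChecksumMismatchError on any mismatch.
# `verify_stream` is a hypothetical distillation of that check:
import hashlib


def verify_stream(chunks, expected_sha256, expected_size):
    digest = hashlib.sha256()
    size = 0
    for chunk in chunks:
        digest.update(chunk)
        size += len(chunk)
    return digest.hexdigest() == expected_sha256 and size == expected_size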
- """) - raise CondaHTTPError(help_message, - url, - status_code, - getattr(e.response, 'reason', None), - getattr(e.response, 'elapsed', None), - e.response, - caused_by=e) + """ + ) + raise CondaHTTPError( + help_message, + url, + status_code, + getattr(e.response, "reason", None), + getattr(e.response, "elapsed", None), + e.response, + caused_by=e, + ) return response.text @@ -222,12 +267,13 @@ class TmpDownload: """ Context manager to handle downloads to a tempfile """ + def __init__(self, url, verbose=True): self.url = url self.verbose = verbose def __enter__(self): - if '://' not in self.url: + if "://" not in self.url: # if we provide the file itself, no tmp dir is created self.tmp_dir = None return self.url diff --git a/conda/gateways/connection/session.py b/conda/gateways/connection/session.py index f2be0228510..c8e8cf22643 100644 --- a/conda/gateways/connection/session.py +++ b/conda/gateways/connection/session.py @@ -1,41 +1,58 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger from threading import local -from . import (AuthBase, BaseAdapter, HTTPAdapter, Session, _basic_auth_str, - extract_cookies_to_jar, get_auth_from_url, get_netrc_auth, Retry) -from .adapters.ftp import FTPAdapter -from .adapters.localfs import LocalFSAdapter -from .adapters.s3 import S3Adapter -from ..anaconda_client import read_binstar_tokens from ...auxlib.ish import dals from ...base.constants import CONDA_HOMEPAGE_URL from ...base.context import context -from ...common.url import (add_username_and_password, get_proxy_username_and_pass, - split_anaconda_token, urlparse) +from ...common.url import ( + add_username_and_password, + get_proxy_username_and_pass, + split_anaconda_token, + urlparse, +) from ...exceptions import ProxyError +from ..anaconda_client import read_binstar_tokens +from . import ( + AuthBase, + BaseAdapter, + HTTPAdapter, + Retry, + Session, + _basic_auth_str, + extract_cookies_to_jar, + get_auth_from_url, + get_netrc_auth, +) +from .adapters.ftp import FTPAdapter +from .adapters.localfs import LocalFSAdapter +from .adapters.s3 import S3Adapter log = getLogger(__name__) RETRIES = 3 -CONDA_SESSION_SCHEMES = frozenset(( - "http", - "https", - "ftp", - "s3", - "file", -)) +CONDA_SESSION_SCHEMES = frozenset( + ( + "http", + "https", + "ftp", + "s3", + "file", + ) +) -class EnforceUnusedAdapter(BaseAdapter): +class EnforceUnusedAdapter(BaseAdapter): def send(self, request, *args, **kwargs): - message = dals(""" + message = dals( + """ EnforceUnusedAdapter called with url %s This command is using a remote connection in offline mode. - """ % request.url) + """ + % request.url + ) raise RuntimeError(message) def close(self): @@ -49,7 +66,7 @@ class CondaSessionType(type): """ def __new__(mcs, name, bases, dct): - dct['_thread_local'] = local() + dct["_thread_local"] = local() return super().__new__(mcs, name, bases, dct) def __call__(cls): @@ -61,11 +78,12 @@ def __call__(cls): class CondaSession(Session, metaclass=CondaSessionType): - def __init__(self): super().__init__() - self.auth = CondaHttpAuth() # TODO: should this just be for certain protocol adapters? + self.auth = ( + CondaHttpAuth() + ) # TODO: should this just be for certain protocol adapters? 
self.proxies.update(context.proxy_servers) @@ -78,10 +96,12 @@ def __init__(self): else: # Configure retries - retry = Retry(total=context.remote_max_retries, - backoff_factor=context.remote_backoff_factor, - status_forcelist=[413, 429, 500, 503], - raise_on_status=False) + retry = Retry( + total=context.remote_max_retries, + backoff_factor=context.remote_backoff_factor, + status_forcelist=[413, 429, 500, 503], + raise_on_status=False, + ) http_adapter = HTTPAdapter(max_retries=retry) self.mount("http://", http_adapter) self.mount("https://", http_adapter) @@ -90,7 +110,7 @@ def __init__(self): self.mount("file://", LocalFSAdapter()) - self.headers['User-Agent'] = context.user_agent + self.headers["User-Agent"] = context.user_agent self.verify = context.ssl_verify @@ -106,7 +126,7 @@ class CondaHttpAuth(AuthBase): def __call__(self, request): request.url = CondaHttpAuth.add_binstar_token(request.url) self._apply_basic_auth(request) - request.register_hook('response', self.handle_407) + request.register_hook("response", self.handle_407) return request @staticmethod @@ -120,7 +140,7 @@ def _apply_basic_auth(request): auth = get_netrc_auth(request.url) if isinstance(auth, tuple) and len(auth) == 2: - request.headers['Authorization'] = _basic_auth_str(*auth) + request.headers["Authorization"] = _basic_auth_str(*auth) return request @@ -132,6 +152,7 @@ def add_binstar_token(url): if clean_url.startswith(binstar_url): log.debug("Adding anaconda token for url <%s>", clean_url) from ...models.channel import Channel + channel = Channel(clean_url) channel.token = token return channel.url(with_credentials=True) @@ -163,7 +184,7 @@ def handle_407(response, **kwargs): # pragma: no cover response.content response.close() - proxies = kwargs.pop('proxies') + proxies = kwargs.pop("proxies") proxy_scheme = urlparse(response.url).scheme if proxy_scheme not in proxies: @@ -185,12 +206,12 @@ def handle_407(response, **kwargs): # pragma: no cover proxy_url = add_username_and_password(proxy_url, username, password) proxy_authorization_header = _basic_auth_str(username, password) proxies[proxy_scheme] = proxy_url - kwargs['proxies'] = proxies + kwargs["proxies"] = proxies prep = response.request.copy() extract_cookies_to_jar(prep._cookies, response.request, response.raw) prep.prepare_cookies(prep._cookies) - prep.headers['Proxy-Authorization'] = proxy_authorization_header + prep.headers["Proxy-Authorization"] = proxy_authorization_header _response = response.connection.send(prep, **kwargs) _response.history.append(response) diff --git a/conda/gateways/disk/__init__.py b/conda/gateways/disk/__init__.py index f8f62b8d481..55b4669f9ca 100644 --- a/conda/gateways/disk/__init__.py +++ b/conda/gateways/disk/__init__.py @@ -1,12 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import os +import sys from errno import EACCES, EEXIST, ENOENT, ENOTEMPTY, EPERM, errorcode from logging import getLogger -import os -from os.path import basename, isdir, dirname +from os.path import basename, dirname, isdir from subprocess import CalledProcessError -import sys from time import sleep from ...common.compat import on_win @@ -18,24 +17,27 @@ def exp_backoff_fn(fn, *args, **kwargs): """Mostly for retrying file operations that fail on Windows due to virus scanners""" - max_tries = kwargs.pop('max_tries', MAX_TRIES) + max_tries = kwargs.pop("max_tries", MAX_TRIES) if not on_win: return fn(*args, **kwargs) import random + # with max_tries = 6, max total time ~= 3.2 sec # with max_tries = 7, max total 
time ~= 6.5 sec def sleep_some(n, exc): - if n == max_tries-1: + if n == max_tries - 1: raise - sleep_time = ((2 ** n) + random.random()) * 0.1 + sleep_time = ((2**n) + random.random()) * 0.1 caller_frame = sys._getframe(1) - log.trace("retrying %s/%s %s() in %g sec", - basename(caller_frame.f_code.co_filename), - caller_frame.f_lineno, - fn.__name__, - sleep_time) + log.trace( + "retrying %s/%s %s() in %g sec", + basename(caller_frame.f_code.co_filename), + caller_frame.f_lineno, + fn.__name__, + sleep_time, + ) sleep(sleep_time) for n in range(max_tries): @@ -50,7 +52,9 @@ def sleep_some(n, exc): # errno.ENOTEMPTY OSError(41, 'The directory is not empty') raise else: - log.warn("Uncaught backoff with errno %s %d", errorcode[e.errno], e.errno) + log.warn( + "Uncaught backoff with errno %s %d", errorcode[e.errno], e.errno + ) raise except CalledProcessError as e: sleep_some(n, e) @@ -61,7 +65,7 @@ def sleep_some(n, exc): def mkdir_p(path): # putting this here to help with circular imports try: - log.trace('making directory %s', path) + log.trace("making directory %s", path) if path: os.makedirs(path) return isdir(path) and path @@ -78,7 +82,7 @@ def mkdir_p_sudo_safe(path): base_dir = dirname(path) if not isdir(base_dir): mkdir_p_sudo_safe(base_dir) - log.trace('making directory %s', path) + log.trace("making directory %s", path) try: os.mkdir(path) except OSError as e: @@ -99,6 +103,10 @@ def mkdir_p_sudo_safe(path): try: os.chmod(path, 0o2775) except OSError as e: - log.trace("Failed to set permissions to 2775 on %s (%d %d)", - path, e.errno, errorcode[e.errno]) + log.trace( + "Failed to set permissions to 2775 on %s (%d %d)", + path, + e.errno, + errorcode[e.errno], + ) pass diff --git a/conda/gateways/disk/create.py b/conda/gateways/disk/create.py index c7b2c49081e..24ec00c9d76 100644 --- a/conda/gateways/disk/create.py +++ b/conda/gateways/disk/create.py @@ -1,21 +1,15 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import codecs -from errno import EACCES, EPERM, EROFS -from logging import getLogger import os -from os.path import basename, dirname, isdir, isfile, join, splitext -from shutil import copyfileobj, copystat import sys import tempfile import warnings as _warnings +from errno import EACCES, EPERM, EROFS +from logging import getLogger +from os.path import basename, dirname, isdir, isfile, join, splitext +from shutil import copyfileobj, copystat -from . import mkdir_p -from .delete import path_is_clean, rm_rf -from .link import islink, lexists, link, readlink, symlink -from .permissions import make_executable -from .update import touch from ... import CondaError from ...auxlib.ish import dals from ...base.constants import CONDA_PACKAGE_EXTENSION_V1, PACKAGE_CACHE_MAGIC_FILE @@ -25,6 +19,11 @@ from ...common.serialize import json_dump from ...exceptions import BasicClobberError, CondaOSError, maybe_raise from ...models.enums import LinkType +from . 
import mkdir_p +from .delete import path_is_clean, rm_rf +from .link import islink, lexists, link, readlink, symlink +from .permissions import make_executable +from .update import touch # we have our own TemporaryDirectory implementation both for historical reasons and because @@ -45,7 +44,7 @@ class TemporaryDirectory: name = None _closed = False - def __init__(self, suffix="", prefix='tmp', dir=None): + def __init__(self, suffix="", prefix="tmp", dir=None): self.name = tempfile.mkdtemp(suffix, prefix, dir) def __repr__(self): @@ -56,6 +55,7 @@ def __enter__(self): def cleanup(self, _warn=False, _warnings=_warnings): from .delete import rm_rf as _rm_rf + if self.name and not self._closed: try: _rm_rf(self.name) @@ -74,12 +74,13 @@ def __del__(self): log = getLogger(__name__) -stdoutlog = getLogger('conda.stdoutlog') +stdoutlog = getLogger("conda.stdoutlog") # in __init__.py to help with circular imports mkdir_p = mkdir_p -python_entry_point_template = dals(r""" +python_entry_point_template = dals( + r""" # -*- coding: utf-8 -*- import re import sys @@ -89,9 +90,11 @@ def __del__(self): if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) sys.exit(%(func)s()) -""") # NOQA +""" +) # NOQA -application_entry_point_template = dals(""" +application_entry_point_template = dals( + """ # -*- coding: utf-8 -*- if __name__ == '__main__': import os @@ -100,29 +103,33 @@ def __del__(self): if len(sys.argv) > 1: args += sys.argv[1:] os.execv(args[0], args) -""") +""" +) def write_as_json_to_file(file_path, obj): log.trace("writing json to file %s", file_path) - with codecs.open(file_path, mode='wb', encoding='utf-8') as fo: + with codecs.open(file_path, mode="wb", encoding="utf-8") as fo: json_str = json_dump(obj) fo.write(json_str) def create_python_entry_point(target_full_path, python_full_path, module, func): if lexists(target_full_path): - maybe_raise(BasicClobberError( - source_path=None, - target_path=target_full_path, - context=context, - ), context) + maybe_raise( + BasicClobberError( + source_path=None, + target_path=target_full_path, + context=context, + ), + context, + ) - import_name = func.split('.')[0] + import_name = func.split(".")[0] pyscript = python_entry_point_template % { - 'module': module, - 'func': func, - 'import_name': import_name, + "module": module, + "func": func, + "import_name": import_name, } if python_full_path is not None: from ...core.portability import generate_shebang_for_entry_point @@ -131,7 +138,7 @@ def create_python_entry_point(target_full_path, python_full_path, module, func): else: shebang = None - with codecs.open(target_full_path, mode='wb', encoding='utf-8') as fo: + with codecs.open(target_full_path, mode="wb", encoding="utf-8") as fo: if shebang is not None: fo.write(shebang) fo.write(pyscript) @@ -142,15 +149,20 @@ def create_python_entry_point(target_full_path, python_full_path, module, func): return target_full_path -def create_application_entry_point(source_full_path, target_full_path, python_full_path): +def create_application_entry_point( + source_full_path, target_full_path, python_full_path +): # source_full_path: where the entry point file points to # target_full_path: the location of the new entry point file being created if lexists(target_full_path): - maybe_raise(BasicClobberError( - source_path=None, - target_path=target_full_path, - context=context, - ), context) + maybe_raise( + BasicClobberError( + source_path=None, + target_path=target_full_path, + context=context, + ), + context, + ) entry_point = 
application_entry_point_template % { "source_full_path": win_path_double_escape(source_full_path), @@ -158,9 +170,9 @@ def create_application_entry_point(source_full_path, target_full_path, python_fu if not isdir(dirname(target_full_path)): mkdir_p(dirname(target_full_path)) with open(target_full_path, "w") as fo: - if ' ' in python_full_path: + if " " in python_full_path: python_full_path = ensure_pad(python_full_path, '"') - fo.write('#!%s\n' % python_full_path) + fo.write("#!%s\n" % python_full_path) fo.write(entry_point) make_executable(target_full_path) @@ -194,7 +206,9 @@ def progress_update(self): self.progress_update_callback(rel_pos) -def extract_tarball(tarball_full_path, destination_directory=None, progress_update_callback=None): +def extract_tarball( + tarball_full_path, destination_directory=None, progress_update_callback=None +): import conda_package_handling.api if destination_directory is None: @@ -209,10 +223,14 @@ def extract_tarball(tarball_full_path, destination_directory=None, progress_upda # have a .conda_trash extension though, so it's ok to just write into # the same existing folder. if not path_is_clean(destination_directory): - log.debug("package folder %s was not empty, but we're writing there.", - destination_directory) + log.debug( + "package folder %s was not empty, but we're writing there.", + destination_directory, + ) - conda_package_handling.api.extract(tarball_full_path, dest_dir=destination_directory) + conda_package_handling.api.extract( + tarball_full_path, dest_dir=destination_directory + ) if hasattr(conda_package_handling.api, "THREADSAFE_EXTRACT"): return # indicates conda-package-handling 2.x, which implements --no-same-owner @@ -236,12 +254,15 @@ def make_menu(prefix, file_path, remove=False): """ if not on_win: return - elif basename(prefix).startswith('_'): - log.warn("Environment name starts with underscore '_'. Skipping menu installation.") + elif basename(prefix).startswith("_"): + log.warn( + "Environment name starts with underscore '_'. Skipping menu installation." + ) return try: import menuinst + menuinst.install(join(prefix, win_path_ok(file_path)), remove, prefix) except Exception: stdoutlog.error("menuinst Exception", exc_info=True) @@ -265,7 +286,7 @@ def create_hard_link_or_copy(src, dst): log.trace("creating hard link %s => %s", src, dst) link(src, dst) except OSError: - log.info('hard link failed, so copying %s => %s', src, dst) + log.info("hard link failed, so copying %s => %s", src, dst) _do_copy(src, dst) @@ -292,10 +313,8 @@ def create_fake_executable_softlink(src, dst): assert on_win src_root, _ = splitext(src) # TODO: this open will clobber, consider raising - with open(dst, 'w') as f: - f.write("@echo off\n" - "call \"%s\" %%*\n" - "" % src_root) + with open(dst, "w") as f: + f.write("@echo off\n" 'call "%s" %%*\n' "" % src_root) return dst @@ -303,7 +322,7 @@ def copy(src, dst): # on unix, make sure relative symlinks stay symlinks if not on_win and islink(src): src_points_to = readlink(src) - if not src_points_to.startswith('/'): + if not src_points_to.startswith("/"): # copy relative symlinks as symlinks log.trace("soft linking %s => %s", src, dst) symlink(src_points_to, dst) @@ -320,8 +339,8 @@ def _do_copy(src, dst): # Same size as used by Linux cp command (has performance advantage). # Python's default is 16k. 
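# --- Illustrative sketch, not part of this patch: _do_copy (the buffer_size
# constant appears immediately below, 4194304 == 4 * 1024 * 1024) streams
# through shutil.copyfileobj with a 4 MiB buffer, the size GNU cp uses,
# instead of the much smaller interpreter default (16k per the comment):
import shutil


def copy_4mib(src_path, dst_path):
    with open(src_path, "rb") as fsrc, open(dst_path, "wb") as fdst:
        shutil.copyfileobj(fsrc, fdst, 4 * 1024 * 1024)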
buffer_size = 4194304 # 4 * 1024 * 1024 == 4 MB - with open(src, 'rb') as fsrc: - with open(dst, 'wb') as fdst: + with open(src, "rb") as fsrc: + with open(dst, "wb") as fdst: copyfileobj(fsrc, fdst, buffer_size) try: @@ -329,7 +348,7 @@ def _do_copy(src, dst): except OSError as e: # pragma: no cover # shutil.copystat gives a permission denied when using the os.setxattr function # on the security.selinux property. - log.debug('%r', e) + log.debug("%r", e) def create_link(src, dst, link_type=LinkType.hardlink, force=False): @@ -345,8 +364,10 @@ def create_link(src, dst, link_type=LinkType.hardlink, force=False): return if not lexists(src): - raise CondaError("Cannot link a source that does not exist. %s\n" - "Running `conda clean --packages` may resolve your problem." % src) + raise CondaError( + "Cannot link a source that does not exist. %s\n" + "Running `conda clean --packages` may resolve your problem." % src + ) if lexists(dst): if not force: @@ -362,10 +383,15 @@ def create_link(src, dst, link_type=LinkType.hardlink, force=False): link(src, dst) except OSError as e: log.debug("%r", e) - log.debug("hard-link failed. falling back to copy\n" - " error: %r\n" - " src: %s\n" - " dst: %s", e, src, dst) + log.debug( + "hard-link failed. falling back to copy\n" + " error: %r\n" + " src: %s\n" + " dst: %s", + e, + src, + dst, + ) copy(src, dst) elif link_type == LinkType.softlink: @@ -376,7 +402,9 @@ def create_link(src, dst, link_type=LinkType.hardlink, force=False): raise CondaError("Did not expect linktype=%r" % link_type) -def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, prefix, py_ver): +def compile_multiple_pyc( + python_exe_full_path, py_full_paths, pyc_full_paths, prefix, py_ver +): py_full_paths = tuple(py_full_paths) pyc_full_paths = tuple(pyc_full_paths) if len(py_full_paths) == 0: @@ -386,19 +414,20 @@ def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, pr try: for f in py_full_paths: f = os.path.relpath(f, prefix) - if hasattr(f, 'encode'): - f = f.encode(sys.getfilesystemencoding(), errors='replace') + if hasattr(f, "encode"): + f = f.encode(sys.getfilesystemencoding(), errors="replace") os.write(fd, f + b"\n") os.close(fd) command = ["-Wi", "-m", "compileall", "-q", "-l", "-i", filename] # if the python version in the prefix is 3.5+, we have some extra args. # -j 0 will do the compilation in parallel, with os.cpu_count() cores - if int(py_ver[0]) >= 3 and int(py_ver.split('.')[1]) > 5: + if int(py_ver[0]) >= 3 and int(py_ver.split(".")[1]) > 5: command.extend(["-j", "0"]) command[0:0] = [python_exe_full_path] # command[0:0] = ['--cwd', prefix, '--dev', '-p', prefix, python_exe_full_path] log.trace(command) from conda.gateways.subprocess import any_subprocess + # from conda.common.io import env_vars # This stack does not maintain its _argparse_args correctly? 
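# For reference, the compileall invocation assembled above amounts to
# (list-file path hypothetical):
#
#     <python_exe> -Wi -m compileall -q -l -i /tmp/pyc_list.txt -j 0
#
# where the list file holds one prefix-relative .py path per line and
# "-j 0" fans compilation out across all CPU cores; per the guard above,
# "-j 0" is only appended when the prefix Python is 3.6 or newer.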
# from conda.base.context import stack_context_default @@ -411,7 +440,8 @@ def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, pr created_pyc_paths = [] for py_full_path, pyc_full_path in zip(py_full_paths, pyc_full_paths): if not isfile(pyc_full_path): - message = dals(""" + message = dals( + """ pyc file failed to compile successfully (run_command failed) python_exe_full_path: %s py_full_path: %s @@ -419,9 +449,17 @@ def compile_multiple_pyc(python_exe_full_path, py_full_paths, pyc_full_paths, pr compile rc: %s compile stdout: %s compile stderr: %s - """) - log.info(message, python_exe_full_path, py_full_path, pyc_full_path, - rc, stdout, stderr) + """ + ) + log.info( + message, + python_exe_full_path, + py_full_path, + pyc_full_path, + rc, + stdout, + stderr, + ) else: created_pyc_paths.append(pyc_full_path) @@ -432,9 +470,9 @@ def create_package_cache_directory(pkgs_dir): # returns False if package cache directory cannot be created try: log.trace("creating package cache directory '%s'", pkgs_dir) - sudo_safe = expand(pkgs_dir).startswith(expand('~')) + sudo_safe = expand(pkgs_dir).startswith(expand("~")) touch(join(pkgs_dir, PACKAGE_CACHE_MAGIC_FILE), mkdir=True, sudo_safe=sudo_safe) - touch(join(pkgs_dir, 'urls'), sudo_safe=sudo_safe) + touch(join(pkgs_dir, "urls"), sudo_safe=sudo_safe) except OSError as e: if e.errno in (EACCES, EPERM, EROFS): log.trace("cannot create package cache directory '%s'", pkgs_dir) @@ -450,10 +488,10 @@ def create_envs_directory(envs_dir): # The magic file being used here could change in the future. Don't write programs # outside this code base that rely on the presence of this file. # This value is duplicated in conda.base.context._first_writable_envs_dir(). - envs_dir_magic_file = join(envs_dir, '.conda_envs_dir_test') + envs_dir_magic_file = join(envs_dir, ".conda_envs_dir_test") try: log.trace("creating envs directory '%s'", envs_dir) - sudo_safe = expand(envs_dir).startswith(expand('~')) + sudo_safe = expand(envs_dir).startswith(expand("~")) touch(join(envs_dir, envs_dir_magic_file), mkdir=True, sudo_safe=sudo_safe) except OSError as e: if e.errno in (EACCES, EPERM, EROFS): diff --git a/conda/gateways/disk/delete.py b/conda/gateways/disk/delete.py index 8120faf7d62..f145eb5f1ba 100644 --- a/conda/gateways/disk/delete.py +++ b/conda/gateways/disk/delete.py @@ -1,21 +1,30 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from errno import ENOENT import fnmatch -from logging import getLogger -from os import environ, getcwd, makedirs, rename, rmdir, scandir, unlink, walk -from os.path import abspath, basename, dirname, exists, isdir, isfile, join, normpath, split import shutil -from subprocess import CalledProcessError, STDOUT, check_output import sys +from errno import ENOENT +from logging import getLogger +from os import environ, getcwd, makedirs, rename, rmdir, scandir, unlink, walk +from os.path import ( + abspath, + basename, + dirname, + exists, + isdir, + isfile, + join, + normpath, + split, +) +from subprocess import STDOUT, CalledProcessError, check_output -from . import MAX_TRIES, exp_backoff_fn -from .link import islink, lexists -from .permissions import make_writable, recursive_make_writable from ...base.constants import CONDA_TEMP_EXTENSION from ...base.context import context from ...common.compat import on_win +from . 
import MAX_TRIES, exp_backoff_fn +from .link import islink, lexists +from .permissions import make_writable, recursive_make_writable if not on_win: from ...common.path import which @@ -38,7 +47,9 @@ def rmtree(path, *args, **kwargs): # out = check_output('DEL /F/Q/S *.* > NUL 2> NUL'.format(path), shell=True, # stderr=STDOUT, cwd=path) - out = check_output(f'RD /S /Q "{path}" > NUL 2> NUL', shell=True, stderr=STDOUT) + out = check_output( + f'RD /S /Q "{path}" > NUL 2> NUL', shell=True, stderr=STDOUT + ) except: try: # Try to delete in Unicode @@ -46,7 +57,9 @@ def rmtree(path, *args, **kwargs): from conda.auxlib.compat import Utf8NamedTemporaryFile from conda.utils import quote_for_shell - with Utf8NamedTemporaryFile(mode="w", suffix=".bat", delete=False) as batch_file: + with Utf8NamedTemporaryFile( + mode="w", suffix=".bat", delete=False + ) as batch_file: batch_file.write(f"RD /S {quote_for_shell(path)}\n") batch_file.write("chcp 65001\n") batch_file.write(f"RD /S {quote_for_shell(path)}\n") @@ -57,45 +70,65 @@ def rmtree(path, *args, **kwargs): with open(name) as contents: content = contents.read() assert path in content - comspec = environ['COMSPEC'] + comspec = environ["COMSPEC"] CREATE_NO_WINDOW = 0x08000000 # It is essential that we `pass stdout=None, stderr=None, stdin=None` here because # if we do not, then the standard console handles get attached and chcp affects the # parent process (and any which share those console handles!) - out = check_output([comspec, '/d', '/c', name], shell=False, - stdout=None, stderr=None, stdin=None, - creationflags=CREATE_NO_WINDOW) + out = check_output( + [comspec, "/d", "/c", name], + shell=False, + stdout=None, + stderr=None, + stdin=None, + creationflags=CREATE_NO_WINDOW, + ) except CalledProcessError as e: if e.returncode != 5: - log.error("Removing folder {} the fast way failed. Output was: {}" - .format(name, out)) + log.error( + "Removing folder {} the fast way failed. Output was: {}".format( + name, out + ) + ) raise else: - log.debug("removing dir contents the fast way failed. Output was: {}" - .format(out)) + log.debug( + "removing dir contents the fast way failed. Output was: {}".format( + out + ) + ) else: try: - makedirs('.empty') + makedirs(".empty") except: pass # yes, this looks strange. See # https://unix.stackexchange.com/a/79656/34459 # https://web.archive.org/web/20130929001850/http://linuxnote.net/jianingy/en/linux/a-fast-way-to-remove-huge-number-of-files.html # NOQA - if isdir('.empty'): - rsync = which('rsync') + if isdir(".empty"): + rsync = which("rsync") if rsync: try: out = check_output( - [rsync, '-a', '--force', '--delete', join(getcwd(), '.empty') + "/", - path + "/"], - stderr=STDOUT) + [ + rsync, + "-a", + "--force", + "--delete", + join(getcwd(), ".empty") + "/", + path + "/", + ], + stderr=STDOUT, + ) except CalledProcessError: - log.debug(f"removing dir contents the fast way failed. Output was: {out}") + log.debug( + f"removing dir contents the fast way failed. Output was: {out}" + ) - shutil.rmtree('.empty') + shutil.rmtree(".empty") shutil.rmtree(path) @@ -116,7 +149,7 @@ def unlink_or_rename_to_trash(path): # on windows, it is important to use the rename program, as just using python's # rename leads to permission errors when files are in use. 
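Stepping back: the rsync branch in the hunk above is a well-known fast-deletion trick; reduced to a standalone sketch it looks like this (assumes a POSIX system with rsync on PATH; the function name is illustrative):

    import subprocess
    import tempfile

    def rsync_clear_directory(path):
        # syncing an empty directory over `path` with --delete removes its
        # contents in a single pass, which is far faster than per-file
        # unlink on trees containing very many small files
        with tempfile.TemporaryDirectory() as empty:
            subprocess.check_output(
                ["rsync", "-a", "--force", "--delete", empty + "/", path + "/"],
                stderr=subprocess.STDOUT,
            )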
condabin_dir = join(context.conda_prefix, "condabin") - trash_script = join(condabin_dir, 'rename_tmp.bat') + trash_script = join(condabin_dir, "rename_tmp.bat") if exists(trash_script): _dirname, _fn = split(path) dest_fn = path + ".conda_trash" @@ -126,19 +159,35 @@ def unlink_or_rename_to_trash(path): counter += 1 out = "< empty >" try: - out = check_output(['cmd.exe', '/C', trash_script, _dirname, _fn, - basename(dest_fn)], - stderr=STDOUT) + out = check_output( + [ + "cmd.exe", + "/C", + trash_script, + _dirname, + _fn, + basename(dest_fn), + ], + stderr=STDOUT, + ) except CalledProcessError: - log.debug("renaming file path {} to trash failed. Output was: {}" - .format(path, out)) + log.debug( + "renaming file path {} to trash failed. Output was: {}".format( + path, out + ) + ) else: - log.debug("{} is missing. Conda was not installed correctly or has been " - "corrupted. Please file an issue on the conda github repo." - .format(trash_script)) - log.warn("Could not remove or rename {}. Please remove this file manually (you " - "may need to reboot to free file handles)".format(path)) + log.debug( + "{} is missing. Conda was not installed correctly or has been " + "corrupted. Please file an issue on the conda github repo.".format( + trash_script + ) + ) + log.warn( + "Could not remove or rename {}. Please remove this file manually (you " + "may need to reboot to free file handles)".format(path) + ) def remove_empty_parent_paths(path): @@ -188,8 +237,9 @@ def delete_trash(prefix): for root, dirs, files in walk(prefix, topdown=True): dirs[:] = [d for d in dirs if d not in exclude] for fn in files: - if (fnmatch.fnmatch(fn, "*.conda_trash*") or - fnmatch.fnmatch(fn, "*" + CONDA_TEMP_EXTENSION)): + if fnmatch.fnmatch(fn, "*.conda_trash*") or fnmatch.fnmatch( + fn, "*" + CONDA_TEMP_EXTENSION + ): filename = join(root, fn) try: unlink(filename) @@ -203,7 +253,7 @@ def backoff_rmdir(dirpath, max_tries=MAX_TRIES): return def retry(func, path, exc_info): - if getattr(exc_info[1], 'errno', None) == ENOENT: + if getattr(exc_info[1], "errno", None) == ENOENT: return recursive_make_writable(dirname(path), max_tries=max_tries) func(path) @@ -217,6 +267,7 @@ def _rmdir(path): log.trace("no such file or directory: %s", path) else: raise + try: rmtree(dirpath) # we don't really care about errors that much. We'll catch remaining files @@ -237,7 +288,9 @@ def path_is_clean(path): if not clean: for root, dirs, fns in walk(path): for fn in fns: - if not (fnmatch.fnmatch(fn, "*.conda_trash*") or - fnmatch.fnmatch(fn, "*" + CONDA_TEMP_EXTENSION)): + if not ( + fnmatch.fnmatch(fn, "*.conda_trash*") + or fnmatch.fnmatch(fn, "*" + CONDA_TEMP_EXTENSION) + ): return False return True diff --git a/conda/gateways/disk/link.py b/conda/gateways/disk/link.py index b5402313507..bf86a0f9f46 100644 --- a/conda/gateways/disk/link.py +++ b/conda/gateways/disk/link.py @@ -4,10 +4,12 @@ # which is MIT licensed by Jason R. Coombs. 
# https://github.com/jaraco/skeleton/issues/1#issuecomment-285448440 +import sys from logging import getLogger from os import chmod as os_chmod -from os.path import abspath, isdir, islink as os_islink, lexists as os_lexists -import sys +from os.path import abspath, isdir +from os.path import islink as os_islink +from os.path import lexists as os_lexists from ...common.compat import on_win from ...exceptions import CondaOSError, ParseError @@ -15,13 +17,15 @@ __all__ = ("islink", "lchmod", "lexists", "link", "readlink", "symlink") log = getLogger(__name__) -PYPY = sys.implementation.name == 'pypy' +PYPY = sys.implementation.name == "pypy" try: from os import lchmod as os_lchmod + lchmod = os_lchmod except ImportError: # pragma: no cover + def lchmod(path, mode): # On systems that don't allow permissions on symbolic links, skip # links entirely. @@ -31,20 +35,24 @@ def lchmod(path, mode): if not on_win: # pragma: win no cover from os import link, symlink + link = link symlink = symlink else: # pragma: unix no cover from ctypes import windll, wintypes + CreateHardLink = windll.kernel32.CreateHardLinkW CreateHardLink.restype = wintypes.BOOL - CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR, - wintypes.LPVOID] + CreateHardLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR, wintypes.LPVOID] try: CreateSymbolicLink = windll.kernel32.CreateSymbolicLinkW CreateSymbolicLink.restype = wintypes.BOOL - CreateSymbolicLink.argtypes = [wintypes.LPCWSTR, wintypes.LPCWSTR, - wintypes.DWORD] + CreateSymbolicLink.argtypes = [ + wintypes.LPCWSTR, + wintypes.LPCWSTR, + wintypes.DWORD, + ] except AttributeError: CreateSymbolicLink = None @@ -56,7 +64,7 @@ def win_hard_link(src, dst): def win_soft_link(src, dst): """Equivalent to os.symlink, using the win32 CreateSymbolicLink call.""" if CreateSymbolicLink is None: - raise CondaOSError('win32 soft link not supported') + raise CondaOSError("win32 soft link not supported") if not CreateSymbolicLink(dst, src, isdir(src)): raise CondaOSError(f"win32 soft link failed\n src: {src}\n dst: {dst}") @@ -66,18 +74,18 @@ def win_soft_link(src, dst): if not (on_win and PYPY): from os import readlink + islink = os_islink lexists = os_lexists readlink = readlink else: # pragma: no cover - from ctypes import (POINTER, Structure, byref, c_uint64, cast, windll, - wintypes) + import builtins import inspect + import sys + from ctypes import POINTER, Structure, byref, c_uint64, cast, windll, wintypes from os import getcwd from os.path import isfile - import sys - import builtins def islink(path): """Determine if the given path is a symlink""" @@ -101,14 +109,14 @@ def lexists(path): class WIN32_FIND_DATA(Structure): _fields_ = [ - ('file_attributes', wintypes.DWORD), - ('creation_time', wintypes.FILETIME), - ('last_access_time', wintypes.FILETIME), - ('last_write_time', wintypes.FILETIME), - ('file_size_words', wintypes.DWORD*2), - ('reserved', wintypes.DWORD*2), - ('filename', wintypes.WCHAR*MAX_PATH), - ('alternate_filename', wintypes.WCHAR*14), + ("file_attributes", wintypes.DWORD), + ("creation_time", wintypes.FILETIME), + ("last_access_time", wintypes.FILETIME), + ("last_write_time", wintypes.FILETIME), + ("file_size_words", wintypes.DWORD * 2), + ("reserved", wintypes.DWORD * 2), + ("filename", wintypes.WCHAR * MAX_PATH), + ("alternate_filename", wintypes.WCHAR * 14), ] @property @@ -188,7 +196,7 @@ def __str__(self): return self.message def __repr__(self): - return '{self.__class__.__name__}({self.winerror})'.format(**vars()) + return 
"{self.__class__.__name__}({self.winerror})".format(**vars()) def _is_symlink(find_data): return find_data.reserved[0] == IO_REPARSE_TAG_SYMLINK @@ -201,13 +209,13 @@ def _patch_path(path): See http://msdn.microsoft.com/en-us/library/aa365247%28v=vs.85%29.aspx for details. """ # NOQA - if path.startswith('\\\\?\\'): + if path.startswith("\\\\?\\"): return path path = abspath(path) - if not path[1] == ':': + if not path[1] == ":": # python doesn't include the drive letter, but \\?\ requires it path = getcwd()[:2] + path - return '\\\\?\\' + path + return "\\\\?\\" + path def local_format(string): """ @@ -267,25 +275,25 @@ def is_reparse_point(path): be determined. """ res = GetFileAttributes(path) - return ( - res != INVALID_FILE_ATTRIBUTES - and bool(res & FILE_ATTRIBUTE_REPARSE_POINT) + return res != INVALID_FILE_ATTRIBUTES and bool( + res & FILE_ATTRIBUTE_REPARSE_POINT ) OPEN_EXISTING = 3 FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000 FILE_FLAG_BACKUP_SEMANTICS = 0x2000000 - FSCTL_GET_REPARSE_POINT = 0x900a8 + FSCTL_GET_REPARSE_POINT = 0x900A8 LPDWORD = POINTER(wintypes.DWORD) LPOVERLAPPED = wintypes.LPVOID # VOLUME_NAME_DOS = 0 class SECURITY_ATTRIBUTES(Structure): _fields_ = ( - ('length', wintypes.DWORD), - ('p_security_descriptor', wintypes.LPVOID), - ('inherit_handle', wintypes.BOOLEAN), + ("length", wintypes.DWORD), + ("p_security_descriptor", wintypes.LPVOID), + ("inherit_handle", wintypes.BOOLEAN), ) + LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES) CreateFile = windll.kernel32.CreateFileW @@ -304,19 +312,19 @@ class SECURITY_ATTRIBUTES(Structure): CloseHandle.argtypes = (wintypes.HANDLE,) CloseHandle.restype = wintypes.BOOLEAN - from ctypes import Array, create_string_buffer, c_byte, c_ulong, c_ushort, sizeof + from ctypes import Array, c_byte, c_ulong, c_ushort, create_string_buffer, sizeof class REPARSE_DATA_BUFFER(Structure): _fields_ = [ - ('tag', c_ulong), - ('data_length', c_ushort), - ('reserved', c_ushort), - ('substitute_name_offset', c_ushort), - ('substitute_name_length', c_ushort), - ('print_name_offset', c_ushort), - ('print_name_length', c_ushort), - ('flags', c_ulong), - ('path_buffer', c_byte * 1), + ("tag", c_ulong), + ("data_length", c_ushort), + ("reserved", c_ushort), + ("substitute_name_offset", c_ushort), + ("substitute_name_length", c_ushort), + ("print_name_offset", c_ushort), + ("print_name_length", c_ushort), + ("flags", c_ulong), + ("path_buffer", c_byte * 1), ] def get_print_name(self): @@ -336,9 +344,15 @@ def readlink(link): readlink(link) -> target Return a string representing the path to which the symbolic link points. 
""" - handle = CreateFile(link, 0, 0, None, OPEN_EXISTING, - FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, - None) + handle = CreateFile( + link, + 0, + 0, + None, + OPEN_EXISTING, + FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS, + None, + ) if handle == INVALID_HANDLE_VALUE: raise OSError() @@ -367,7 +381,9 @@ def readlink(link): ] DeviceIoControl.restype = wintypes.BOOL - def reparse_DeviceIoControl(device, io_control_code, in_buffer, out_buffer, overlapped=None): + def reparse_DeviceIoControl( + device, io_control_code, in_buffer, out_buffer, overlapped=None + ): if overlapped is not None: raise NotImplementedError("overlapped handles not yet supported") @@ -383,12 +399,14 @@ def reparse_DeviceIoControl(device, io_control_code, in_buffer, out_buffer, over res = DeviceIoControl( device, io_control_code, - in_buffer, in_buffer_size, - out_buffer, out_buffer_size, + in_buffer, + in_buffer_size, + out_buffer, + out_buffer_size, returned_bytes, overlapped, ) handle_nonzero_success(res) handle_nonzero_success(returned_bytes) - return out_buffer[:returned_bytes.value] + return out_buffer[: returned_bytes.value] diff --git a/conda/gateways/disk/permissions.py b/conda/gateways/disk/permissions.py index 717bd6b15d8..18a942e5890 100644 --- a/conda/gateways/disk/permissions.py +++ b/conda/gateways/disk/permissions.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from errno import EACCES, ENOENT, EPERM, EROFS from itertools import chain from logging import getLogger @@ -8,9 +7,9 @@ from os.path import isdir, isfile, join from stat import S_IEXEC, S_IMODE, S_ISDIR, S_ISREG, S_IWRITE, S_IXGRP, S_IXOTH, S_IXUSR +from ...common.compat import on_win from . import MAX_TRIES, exp_backoff_fn from .link import islink, lchmod -from ...common.compat import on_win log = getLogger(__name__) @@ -28,7 +27,7 @@ def make_writable(path): log.debug("path cannot be made writable: %s", path) return True except Exception as e: - eno = getattr(e, 'errno', None) + eno = getattr(e, "errno", None) if eno in (ENOENT,): log.debug("tried to make writable, but didn't exist: %s", path) raise @@ -74,7 +73,7 @@ def recursive_make_writable(path, max_tries=MAX_TRIES): def make_executable(path): if isfile(path): mode = lstat(path).st_mode - log.trace('chmod +x %s', path) + log.trace("chmod +x %s", path) chmod(path, S_IMODE(mode) | S_IXUSR | S_IXGRP | S_IXOTH) else: log.error("Cannot make path '%s' executable", path) @@ -82,5 +81,5 @@ def make_executable(path): def is_executable(path): if isfile(path): # for now, leave out `and not islink(path)` - return path.endswith(('.exe', '.bat')) if on_win else access(path, X_OK) + return path.endswith((".exe", ".bat")) if on_win else access(path, X_OK) return False diff --git a/conda/gateways/disk/read.py b/conda/gateways/disk/read.py index d3234084ef2..d84a6bd0873 100644 --- a/conda/gateways/disk/read.py +++ b/conda/gateways/disk/read.py @@ -2,35 +2,37 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations +import hashlib +import json +import os from base64 import b64encode from collections import namedtuple from errno import ENOENT from functools import partial -import hashlib from itertools import chain -import json from logging import getLogger -import os from os.path import isdir, isfile, join # noqa from pathlib import Path -from .link import islink, lexists # noqa -from .create import TemporaryDirectory -from ...deprecations import deprecated from ...auxlib.collection import first from 
...auxlib.compat import shlex_split_unicode from ...auxlib.ish import dals from ...base.constants import PREFIX_PLACEHOLDER from ...common.compat import open from ...common.pkg_formats.python import ( - PythonDistribution, PythonEggInfoDistribution, PythonEggLinkDistribution, + PythonDistribution, + PythonEggInfoDistribution, + PythonEggLinkDistribution, PythonInstalledDistribution, ) +from ...deprecations import deprecated from ...exceptions import CondaUpgradeError, CondaVerificationError, PathNotFoundError from ...models.channel import Channel from ...models.enums import FileMode, PackageType, PathType from ...models.package_info import PackageInfo, PackageMetadata from ...models.records import PathData, PathDataV1, PathsData, PrefixRecord +from .create import TemporaryDirectory +from .link import islink, lexists # noqa log = getLogger(__name__) @@ -51,7 +53,7 @@ def yield_lines(path): with open(path) as fh: for line in fh: line = line.strip() - if not line or line.startswith('#'): + if not line or line.startswith("#"): continue yield line except OSError as e: @@ -61,7 +63,9 @@ def yield_lines(path): raise -@deprecated("23.9", "24.3", addendum="Use `conda.gateways.disk.read.compute_sum` instead.") +@deprecated( + "23.9", "24.3", addendum="Use `conda.gateways.disk.read.compute_sum` instead." +) def _digest_path(algo: Literal["md5", "sha256"], path: str | os.PathLike) -> str: return compute_sum(path, algo) @@ -74,7 +78,7 @@ def compute_sum(path: str | os.PathLike, algo: Literal["md5", "sha256"]) -> str: # FUTURE: Python 3.11+, replace with hashlib.file_digest hasher = hashlib.new(algo) with path.open("rb") as fh: - for chunk in iter(partial(fh.read, 8192), b''): + for chunk in iter(partial(fh.read, 8192), b""): hasher.update(chunk) return hasher.hexdigest() @@ -101,6 +105,7 @@ def compute_sha256sum(path: str | os.PathLike) -> str: # functions supporting read_package_info() # #################################################### + def read_package_info(record, package_cache_record): epd = package_cache_record.extracted_package_dir icondata = read_icondata(epd) @@ -113,7 +118,6 @@ def read_package_info(record, package_cache_record): channel=Channel(record.schannel or record.channel), repodata_record=record, url=package_cache_record.url, - icondata=icondata, package_metadata=package_metadata, paths_data=paths_data, @@ -121,38 +125,39 @@ def read_package_info(record, package_cache_record): def read_index_json(extracted_package_directory): - with open(join(extracted_package_directory, 'info', 'index.json')) as fi: + with open(join(extracted_package_directory, "info", "index.json")) as fi: return json.load(fi) def read_index_json_from_tarball(package_tarball_full_path): import conda_package_handling.api + with TemporaryDirectory() as tmpdir: - conda_package_handling.api.extract(package_tarball_full_path, tmpdir, 'info') - with open(join(tmpdir, 'info', 'index.json')) as f: + conda_package_handling.api.extract(package_tarball_full_path, tmpdir, "info") + with open(join(tmpdir, "info", "index.json")) as f: json_data = json.load(f) return json_data def read_repodata_json(extracted_package_directory): - with open(join(extracted_package_directory, 'info', 'repodata_record.json')) as fi: + with open(join(extracted_package_directory, "info", "repodata_record.json")) as fi: return json.load(fi) def read_icondata(extracted_package_directory): - icon_file_path = join(extracted_package_directory, 'info', 'icon.png') + icon_file_path = join(extracted_package_directory, "info", "icon.png") if 
isfile(icon_file_path): - with open(icon_file_path, 'rb') as f: + with open(icon_file_path, "rb") as f: data = f.read() - return b64encode(data).decode('utf-8') + return b64encode(data).decode("utf-8") else: return None def read_package_metadata(extracted_package_directory): def _paths(): - yield join(extracted_package_directory, 'info', 'link.json') - yield join(extracted_package_directory, 'info', 'package_metadata.json') + yield join(extracted_package_directory, "info", "link.json") + yield join(extracted_package_directory, "info", "package_metadata.json") path = first(_paths(), key=isfile) if not path: @@ -160,38 +165,48 @@ def _paths(): else: with open(path) as f: data = json.loads(f.read()) - if data.get('package_metadata_version') != 1: - raise CondaUpgradeError(dals(""" + if data.get("package_metadata_version") != 1: + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to install this package. (This version only supports link.json schema version 1.) Please update conda to install this package. - """)) + """ + ) + ) package_metadata = PackageMetadata(**data) return package_metadata def read_paths_json(extracted_package_directory): - info_dir = join(extracted_package_directory, 'info') - paths_json_path = join(info_dir, 'paths.json') + info_dir = join(extracted_package_directory, "info") + paths_json_path = join(info_dir, "paths.json") if isfile(paths_json_path): with open(paths_json_path) as paths_json: data = json.load(paths_json) - if data.get('paths_version') != 1: - raise CondaUpgradeError(dals(""" + if data.get("paths_version") != 1: + raise CondaUpgradeError( + dals( + """ The current version of conda is too old to install this package. (This version only supports paths.json schema version 1.) Please update conda to install - this package.""")) + this package.""" + ) + ) paths_data = PathsData( paths_version=1, - paths=(PathDataV1(**f) for f in data['paths']), + paths=(PathDataV1(**f) for f in data["paths"]), ) else: - has_prefix_files = read_has_prefix(join(info_dir, 'has_prefix')) + has_prefix_files = read_has_prefix(join(info_dir, "has_prefix")) no_link = read_no_link(info_dir) def read_files_file(): - files_path = join(info_dir, 'files') - for f in (ln for ln in (line.strip() for line in yield_lines(files_path)) if ln): + files_path = join(info_dir, "files") + for f in ( + ln for ln in (line.strip() for line in yield_lines(files_path)) if ln + ): path_info = {"_path": f} if f in has_prefix_files.keys(): path_info["prefix_placeholder"] = has_prefix_files[f][0] @@ -224,11 +239,11 @@ def read_has_prefix(path): * text * binary """ - ParseResult = namedtuple('ParseResult', ('placeholder', 'filemode', 'filepath')) + ParseResult = namedtuple("ParseResult", ("placeholder", "filemode", "filepath")) def parse_line(line): # placeholder, filemode, filepath - parts = tuple(x.strip('"\'') for x in shlex_split_unicode(line, posix=False)) + parts = tuple(x.strip("\"'") for x in shlex_split_unicode(line, posix=False)) if len(parts) == 1: return ParseResult(PREFIX_PLACEHOLDER, FileMode.text, parts[0]) elif len(parts) == 3: @@ -241,8 +256,12 @@ def parse_line(line): def read_no_link(info_dir): - return set(chain(yield_lines(join(info_dir, 'no_link')), - yield_lines(join(info_dir, 'no_softlink')))) + return set( + chain( + yield_lines(join(info_dir, "no_link")), + yield_lines(join(info_dir, "no_softlink")), + ) + ) def read_soft_links(extracted_package_directory, files): @@ -263,11 +282,18 @@ def read_python_record(prefix_path, anchor_file, python_version): 
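Before the read_python_record hunk below, some context for the read_has_prefix parsing above: each line of info/has_prefix is either a bare path, or a quoted placeholder plus a file mode plus a path. A rough stdlib-only illustration with a hypothetical input line (conda itself uses shlex_split_unicode and its real prefix placeholder):

    import shlex

    line = '"/opt/placeholder" text bin/example'  # hypothetical input
    parts = [p.strip("\"'") for p in shlex.split(line, posix=False)]
    if len(parts) == 3:
        placeholder, filemode, filepath = parts
    else:
        # a bare path means: default placeholder, text mode
        placeholder, filemode, filepath = "<default>", "text", parts[0]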
package_type = PackageType.VIRTUAL_PYTHON_WHEEL paths_tups = pydist.get_paths() - paths_data = PathsData(paths_version=1, paths=( - PathDataV1( - _path=path, path_type=PathType.hardlink, sha256=checksum, size_in_bytes=size - ) for (path, checksum, size) in paths_tups - )) + paths_data = PathsData( + paths_version=1, + paths=( + PathDataV1( + _path=path, + path_type=PathType.hardlink, + sha256=checksum, + size_in_bytes=size, + ) + for (path, checksum, size) in paths_tups + ), + ) files = tuple(p[0] for p in paths_tups) elif isinstance(pydist, PythonEggLinkDistribution): @@ -285,9 +311,12 @@ def read_python_record(prefix_path, anchor_file, python_version): paths_tups = pydist.get_paths() files = tuple(p[0] for p in paths_tups) - paths_data = PathsData(paths_version=1, paths=( - PathData(_path=path, path_type=PathType.hardlink) for path in files - )) + paths_data = PathsData( + paths_version=1, + paths=( + PathData(_path=path, path_type=PathType.hardlink) for path in files + ), + ) else: package_type = PackageType.VIRTUAL_PYTHON_EGG_UNMANAGEABLE paths_data, files = PathsData(paths_version=1, paths=()), () diff --git a/conda/gateways/disk/test.py b/conda/gateways/disk/test.py index 49b259abd81..4947d169059 100644 --- a/conda/gateways/disk/test.py +++ b/conda/gateways/disk/test.py @@ -1,18 +1,17 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from functools import lru_cache from logging import getLogger from os import W_OK, access from os.path import basename, dirname, isdir, isfile, join from uuid import uuid4 -from .create import create_link -from .delete import rm_rf -from .link import islink, lexists from ...base.constants import PREFIX_MAGIC_FILE from ...common.path import expand from ...models.enums import LinkType +from .create import create_link +from .delete import rm_rf +from .link import islink, lexists log = getLogger(__name__) @@ -23,7 +22,7 @@ def file_path_is_writable(path): if isdir(dirname(path)): path_existed = lexists(path) try: - fh = open(path, 'a+') + fh = open(path, "a+") except OSError as e: log.debug(e) return False @@ -67,7 +66,7 @@ def softlink_supported(source_file, dest_dir): # On Windows, softlink creation is restricted to Administrative users by default. It can # optionally be enabled for non-admin users through explicit registry modification. log.trace("checking soft link capability for %s => %s", source_file, dest_dir) - test_path = join(dest_dir, '.tmp.' + basename(source_file)) + test_path = join(dest_dir, ".tmp." + basename(source_file)) assert isfile(source_file), source_file assert isdir(dest_dir), dest_dir assert not lexists(test_path), test_path diff --git a/conda/gateways/disk/update.py b/conda/gateways/disk/update.py index 582fbcaf7ae..d7ead331813 100644 --- a/conda/gateways/disk/update.py +++ b/conda/gateways/disk/update.py @@ -1,29 +1,28 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from contextlib import contextmanager -from errno import EINVAL, EXDEV, EPERM -from logging import getLogger import os -from os.path import dirname, isdir, split, basename, join, exists import re -from shutil import move -from subprocess import Popen, PIPE import tempfile +from contextlib import contextmanager +from errno import EINVAL, EPERM, EXDEV +from logging import getLogger +from os.path import basename, dirname, exists, isdir, join, split +from shutil import move +from subprocess import PIPE, Popen from typing import Optional -from . 
import exp_backoff_fn, mkdir_p, mkdir_p_sudo_safe -from .delete import rm_rf -from .link import lexists -from ...base.context import context from ...base.constants import DRY_RUN_PREFIX +from ...base.context import context from ...common.compat import on_win from ...common.path import expand from ...exceptions import NotWritableError +from . import exp_backoff_fn, mkdir_p, mkdir_p_sudo_safe +from .delete import rm_rf +from .link import lexists log = getLogger(__name__) -SHEBANG_REGEX = re.compile(br'^(#!((?:\\ |[^ \n\r])+)(.*))') +SHEBANG_REGEX = re.compile(rb"^(#!((?:\\ |[^ \n\r])+)(.*))") class CancelOperation(Exception): @@ -36,7 +35,7 @@ def update_file_in_place_as_binary(file_full_path, callback): # this method updates the file in-place, without releasing the file lock fh = None try: - fh = exp_backoff_fn(open, file_full_path, 'rb+') + fh = exp_backoff_fn(open, file_full_path, "rb+") log.trace("in-place update path locked for %s", file_full_path) data = fh.read() fh.seek(0) @@ -60,25 +59,39 @@ def rename(source_path, destination_path, force=False): try: os.rename(source_path, destination_path) except OSError as e: - if (on_win and dirname(source_path) == dirname(destination_path) - and os.path.isfile(source_path)): + if ( + on_win + and dirname(source_path) == dirname(destination_path) + and os.path.isfile(source_path) + ): condabin_dir = join(context.conda_prefix, "condabin") - rename_script = join(condabin_dir, 'rename_tmp.bat') + rename_script = join(condabin_dir, "rename_tmp.bat") if exists(rename_script): _dirname, _src_fn = split(source_path) _dest_fn = basename(destination_path) - p = Popen(['cmd.exe', '/C', rename_script, _dirname, - _src_fn, _dest_fn], stdout=PIPE, stderr=PIPE) + p = Popen( + ["cmd.exe", "/C", rename_script, _dirname, _src_fn, _dest_fn], + stdout=PIPE, + stderr=PIPE, + ) stdout, stderr = p.communicate() else: - log.debug("{} is missing. Conda was not installed correctly or has been " - "corrupted. Please file an issue on the conda github repo." - .format(rename_script)) + log.debug( + "{} is missing. Conda was not installed correctly or has been " + "corrupted. Please file an issue on the conda github repo.".format( + rename_script + ) + ) elif e.errno in (EINVAL, EXDEV, EPERM): # https://github.com/conda/conda/issues/6811 # https://github.com/conda/conda/issues/6711 - log.trace("Could not rename %s => %s due to errno [%s]. Falling back" - " to copy/unlink", source_path, destination_path, e.errno) + log.trace( + "Could not rename %s => %s due to errno [%s]. Falling back" + " to copy/unlink", + source_path, + destination_path, + e.errno, + ) # https://github.com/moby/moby/issues/25409#issuecomment-238537855 # shutil.move() falls back to copy+unlink move(source_path, destination_path) @@ -89,7 +102,9 @@ def rename(source_path, destination_path, force=False): @contextmanager -def rename_context(source: str, destination: Optional[str] = None, dry_run: bool = False): +def rename_context( + source: str, destination: Optional[str] = None, dry_run: bool = False +): """ Used for removing a directory when there are dependent actions (i.e. you need to ensure other actions succeed before removing it). 
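A minimal sketch of the read-modify-write pattern used by update_file_in_place_as_binary above, which holds a single "rb+" handle so the file is never closed (and any lock on it never released) mid-update (helper and callback names are illustrative):

    def rewrite_in_place(path, transform):
        with open(path, "rb+") as fh:
            data = fh.read()
            fh.seek(0)
            # write the transformed bytes, then truncate in case the
            # new content is shorter than the old
            fh.write(transform(data))
            fh.truncate()

    # e.g. rewrite_in_place("cli.py", lambda b: b.replace(b"#!/old/python", b"#!/new/python"))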
@@ -140,7 +155,7 @@ def touch(path, mkdir=False, sudo_safe=False): mkdir_p(dirpath) else: assert isdir(dirname(path)) - with open(path, 'a'): + with open(path, "a"): pass # This chown call causes a false positive PermissionError to be # raised (similar to #7109) when called in an environment which diff --git a/conda/gateways/logging.py b/conda/gateways/logging.py index 64446f2a164..189374f3b9c 100644 --- a/conda/gateways/logging.py +++ b/conda/gateways/logging.py @@ -1,15 +1,23 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from functools import lru_cache, partial import logging -from logging import DEBUG, ERROR, Filter, Formatter, INFO, StreamHandler, WARN, getLogger import re import sys from datetime import datetime +from functools import lru_cache, partial +from logging import ( + DEBUG, + ERROR, + INFO, + WARN, + Filter, + Formatter, + StreamHandler, + getLogger, +) from .. import CondaError -from ..common.io import attach_stderr_handler, _FORMATTER +from ..common.io import _FORMATTER, attach_stderr_handler log = getLogger(__name__) TRACE = 5 # TRACE LOG LEVEL @@ -18,33 +26,34 @@ class TokenURLFilter(Filter): TOKEN_URL_PATTERN = re.compile( - r'(|https?://)' # \1 scheme - r'(|\s' # \2 space, or - r'|(?:(?:\d{1,3}\.){3}\d{1,3})' # ipv4, or - r'|(?:' # domain name - r'(?:[a-zA-Z0-9-]{1,20}\.){0,10}' # non-tld - r'(?:[a-zA-Z]{2}[a-zA-Z0-9-]{0,18})' # tld - r'))' # end domain name - r'(|:\d{1,5})?' # \3 port - r'/t/[a-z0-9A-Z-]+/' # token + r"(|https?://)" # \1 scheme + r"(|\s" # \2 space, or + r"|(?:(?:\d{1,3}\.){3}\d{1,3})" # ipv4, or + r"|(?:" # domain name + r"(?:[a-zA-Z0-9-]{1,20}\.){0,10}" # non-tld + r"(?:[a-zA-Z]{2}[a-zA-Z0-9-]{0,18})" # tld + r"))" # end domain name + r"(|:\d{1,5})?" # \3 port + r"/t/[a-z0-9A-Z-]+/" # token ) - TOKEN_REPLACE = partial(TOKEN_URL_PATTERN.sub, r'\1\2\3/t/<TOKEN>/') + TOKEN_REPLACE = partial(TOKEN_URL_PATTERN.sub, r"\1\2\3/t/<TOKEN>/") def filter(self, record): - ''' + """ Since Python 2's getMessage() is incapable of handling any strings that are not unicode when it interpolates the message with the arguments, we fix that here by doing it ourselves. At the same time we replace tokens in the arguments which was not happening until now. - ''' + """ record.msg = self.TOKEN_REPLACE(record.msg) if record.args: - new_args = tuple(self.TOKEN_REPLACE(arg) - if isinstance(arg, str) else arg - for arg in record.args) + new_args = tuple( + self.TOKEN_REPLACE(arg) if isinstance(arg, str) else arg + for arg in record.args + ) record.msg = record.msg % new_args record.args = None return True @@ -53,7 +62,7 @@ def filter(self, record): class StdStreamHandler(StreamHandler): """Log StreamHandler that always writes to the current sys stream.""" - terminator = '\n' + terminator = "\n" def __init__(self, sys_stream): """ @@ -66,11 +75,11 @@ def __init__(self, sys_stream): def __getattr__(self, attr): # always get current sys.stdout/sys.stderr, unless self.stream has been set explicitly - if attr == 'stream': + if attr == "stream": return getattr(sys, self.sys_stream) return super().__getattribute__(attr) - ''' + """ def emit(self, record): # in contrast to the Python 2.7 StreamHandler, this has no special Unicode handling; # however, this backports the Python >=3.2 terminator attribute and additionally makes it @@ -86,7 +95,7 @@ def emit(self, record): except Exception: self.handleError(record) - ''' + """ # Updated Python 2.7.15's stdlib, with terminator and unicode support.
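Before the updated emit implementation below, a quick aside on TokenURLFilter above: here is the substitution it performs, in isolation (simplified pattern and a made-up URL; conda's real regex also matches bare hosts, IPv4 addresses, and ports):

    import re

    pattern = re.compile(r"(https?://[^\s/]+)/t/[A-Za-z0-9-]+/")
    url = "https://conda.anaconda.org/t/ab-1234/private/noarch/repodata.json"
    print(pattern.sub(r"\1/t/<TOKEN>/", url))
    # https://conda.anaconda.org/t/<TOKEN>/private/noarch/repodata.json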
def emit(self, record): @@ -115,7 +124,9 @@ def emit(self, record): stream.write(fs % msg) else: try: - if isinstance(msg, unicode) and getattr(stream, "encoding", None): # NOQA + if isinstance(msg, unicode) and getattr( + stream, "encoding", None + ): # NOQA ufs = "%s" try: stream.write(ufs % msg) @@ -159,8 +170,8 @@ def initialize_std_loggers(): # corresponding sys streams, filter token urls and don't propagate. formatter = Formatter("%(message)s") - for stream in ('stdout', 'stderr'): - logger = getLogger('conda.%s' % stream) + for stream in ("stdout", "stderr"): + logger = getLogger("conda.%s" % stream) logger.handlers = [] logger.setLevel(INFO) handler = StdStreamHandler(stream) @@ -170,20 +181,20 @@ def initialize_std_loggers(): logger.addFilter(TokenURLFilter()) logger.propagate = False - stdlog_logger = getLogger('conda.%slog' % stream) + stdlog_logger = getLogger("conda.%slog" % stream) stdlog_logger.handlers = [] stdlog_logger.setLevel(DEBUG) stdlog_handler = StdStreamHandler(stream) - stdlog_handler.terminator = '' + stdlog_handler.terminator = "" stdlog_handler.setLevel(DEBUG) stdlog_handler.setFormatter(formatter) stdlog_logger.addHandler(stdlog_handler) stdlog_logger.propagate = False - verbose_logger = getLogger('conda.stdout.verbose') + verbose_logger = getLogger("conda.stdout.verbose") verbose_logger.handlers = [] verbose_logger.setLevel(INFO) - verbose_handler = StdStreamHandler('stdout') + verbose_handler = StdStreamHandler("stdout") verbose_handler.setLevel(INFO) verbose_handler.setFormatter(formatter) verbose_logger.addHandler(verbose_handler) @@ -207,8 +218,8 @@ def set_all_logger_level(level=DEBUG): set_conda_log_level(level) # 'requests' loggers get their own handlers so that they always output messages in long format # regardless of the level. 
- attach_stderr_handler(level, 'requests') - attach_stderr_handler(level, 'requests.packages.urllib3') + attach_stderr_handler(level, "requests") + attach_stderr_handler(level, "requests.packages.urllib3") @lru_cache(maxsize=None) @@ -228,8 +239,10 @@ def set_verbosity(verbosity_level): try: set_all_logger_level(VERBOSITY_LEVELS[verbosity_level]) except IndexError: - raise CondaError("Invalid verbosity level: %(verbosity_level)s", - verbosity_level=verbosity_level) + raise CondaError( + "Invalid verbosity level: %(verbosity_level)s", + verbosity_level=verbosity_level, + ) log.debug("verbosity set to %s", verbosity_level) diff --git a/conda/gateways/repodata/__init__.py b/conda/gateways/repodata/__init__.py index 61f5e3cb6d0..5f18b011898 100644 --- a/conda/gateways/repodata/__init__.py +++ b/conda/gateways/repodata/__init__.py @@ -31,8 +31,8 @@ UnavailableInvalidChannel, ) from conda.gateways.connection import ( - ConnectionError, ChunkedEncodingError, + ConnectionError, HTTPError, InsecureRequestWarning, InvalidSchema, @@ -118,7 +118,10 @@ def repodata(self, state: RepodataState) -> str | None: url = join_url(self._url, filename) with conda_http_errors(self._url, filename): - timeout = context.remote_connect_timeout_secs, context.remote_read_timeout_secs + timeout = ( + context.remote_connect_timeout_secs, + context.remote_read_timeout_secs, + ) response: Response = session.get( url, headers=headers, proxies=session.proxies, timeout=timeout ) @@ -138,7 +141,9 @@ def repodata(self, state: RepodataState) -> str | None: saved_fields = {"_url": self._url} _add_http_value_to_dict(response, "Etag", saved_fields, "_etag") _add_http_value_to_dict(response, "Last-Modified", saved_fields, "_mod") - _add_http_value_to_dict(response, "Cache-Control", saved_fields, "_cache_control") + _add_http_value_to_dict( + response, "Cache-Control", saved_fields, "_cache_control" + ) state.clear() state.update(saved_fields) @@ -296,14 +301,18 @@ def conda_http_errors(url, repodata_fn): a support request with your network engineering team. %s -""" % maybe_unquote(repr(url)) +""" % maybe_unquote( + repr(url) + ) else: help_message = """\ An HTTP error occurred when trying to retrieve this URL. HTTP errors are often intermittent, and a simple retry will get you on your way. 
%s -""" % maybe_unquote(repr(url)) +""" % maybe_unquote( + repr(url) + ) raise CondaHTTPError( help_message, @@ -369,8 +378,12 @@ def save(self): """ serialized = dict(self) json_stat = self.cache_path_json.stat() - serialized.update({"mtime_ns": json_stat.st_mtime_ns, "size": json_stat.st_size}) - return pathlib.Path(self.cache_path_state).write_text(json.dumps(serialized, indent=True)) + serialized.update( + {"mtime_ns": json_stat.st_mtime_ns, "size": json_stat.st_size} + ) + return pathlib.Path(self.cache_path_state).write_text( + json.dumps(serialized, indent=True) + ) @property def mod(self) -> str: @@ -429,7 +442,9 @@ def has_format(self, format: str) -> tuple[bool, datetime.datetime | None]: value = bool(obj["value"]) return (value, last_checked) except (KeyError, ValueError, TypeError) as e: - log.warn("error parsing `has_` object from `.state.json`", exc_info=e) + log.warn( + "error parsing `has_` object from `.state.json`", exc_info=e + ) self.pop(key) return False, datetime.datetime.now(tz=datetime.timezone.utc) @@ -492,10 +507,11 @@ def __init__(self, base, repodata_fn): cache_path_base = pathlib.Path(base) self.cache_dir = cache_path_base.parent self.name = cache_path_base.name - self.repodata_fn = ( - repodata_fn # XXX can we skip repodata_fn or include the full url for debugging + # XXX can we skip repodata_fn or include the full url for debugging + self.repodata_fn = repodata_fn + self.state = RepodataState( + self.cache_path_json, self.cache_path_state, repodata_fn ) - self.state = RepodataState(self.cache_path_json, self.cache_path_state, repodata_fn) @property def cache_path_json(self): diff --git a/conda/gateways/repodata/jlap/core.py b/conda/gateways/repodata/jlap/core.py index 4a862cce762..68e14037336 100644 --- a/conda/gateways/repodata/jlap/core.py +++ b/conda/gateways/repodata/jlap/core.py @@ -77,7 +77,9 @@ def from_lines(cls, lines: Iterable[bytes], iv: bytes, pos=0, verify=True): def from_path(cls, path: Path | str, verify=True): # in binary mode, line separator is hardcoded as \n with Path(path).open("rb") as p: - return cls.from_lines((line.rstrip(b"\n") for line in p), b"", verify=verify) + return cls.from_lines( + (line.rstrip(b"\n") for line in p), b"", verify=verify + ) def add(self, line: str): """ @@ -99,7 +101,9 @@ def add(self, line: str): # include last line's utf-8 encoded length, plus 1 in pos? 
pos += len(last_line.encode("utf-8")) + 1 self.extend( - JLAP.from_lines((line.encode("utf-8"),), bytes.fromhex(iv), pos, verify=False)[1:] + JLAP.from_lines( + (line.encode("utf-8"),), bytes.fromhex(iv), pos, verify=False + )[1:] ) return self diff --git a/conda/gateways/repodata/jlap/fetch.py b/conda/gateways/repodata/jlap/fetch.py index 962704b4df4..fd61403d953 100644 --- a/conda/gateways/repodata/jlap/fetch.py +++ b/conda/gateways/repodata/jlap/fetch.py @@ -2,20 +2,20 @@ # SPDX-License-Identifier: BSD-3-Clause # Lappin' up the jlap from __future__ import annotations -import io +import io import json import logging import pathlib import pprint import re import time -import zstandard from contextlib import contextmanager from hashlib import blake2b from typing import Iterator import jsonpatch +import zstandard from requests import HTTPError from conda.base.context import context @@ -89,11 +89,15 @@ def lines() -> Iterator[bytes]: def fetch_jlap(url, pos=0, etag=None, iv=b"", ignore_etag=True, session=None): - response = request_jlap(url, pos=pos, etag=etag, ignore_etag=ignore_etag, session=session) + response = request_jlap( + url, pos=pos, etag=etag, ignore_etag=ignore_etag, session=session + ) return process_jlap_response(response, pos=pos, iv=iv) -def request_jlap(url, pos=0, etag=None, ignore_etag=True, session: Session | None = None): +def request_jlap( + url, pos=0, etag=None, ignore_etag=True, session: Session | None = None +): """ Return the part of the remote .jlap file we are interested in. """ @@ -118,7 +122,11 @@ def request_jlap(url, pos=0, etag=None, ignore_etag=True, session: Session | Non { k: v for k, v in response.headers.items() - if any(map(k.lower().__contains__, ("content", "last", "range", "encoding"))) + if any( + map( + k.lower().__contains__, ("content", "last", "range", "encoding") + ) + ) } ), ) @@ -152,7 +160,9 @@ def find_patches(patches, have, want): break if patch["to"] == want: log.info( - "Collect %s \N{LEFTWARDS ARROW} %s", format_hash(want), format_hash(patch["from"]) + "Collect %s \N{LEFTWARDS ARROW} %s", + format_hash(want), + format_hash(patch["from"]), ) apply.append(patch) want = patch["from"] @@ -243,9 +253,13 @@ def download_and_hash( def request_url_jlap_state( - url, state: RepodataState, get_place=get_place, full_download=False, *, session: Session + url, + state: RepodataState, + get_place=get_place, + full_download=False, + *, + session: Session, ): - jlap_state = state.get(JLAP_KEY, {}) headers = jlap_state.get(HEADERS, {}) @@ -260,7 +274,6 @@ def request_url_jlap_state( ): hasher = hash() with timeme(f"Download complete {url} "): - # Don't deal with 304 Not Modified if hash unavailable e.g. 
if # cached without jlap if NOMINAL_HASH not in state: @@ -287,7 +300,11 @@ def request_url_jlap_state( state.set_has_format("zst", False) state[ZSTD_UNAVAILABLE] = time.time_ns() # alternate method response = download_and_hash( - hasher, withext(url, ".json"), json_path, session=session, state=state + hasher, + withext(url, ".json"), + json_path, + session=session, + state=state, ) # will we use state['headers'] for caching against @@ -344,7 +361,9 @@ def request_url_jlap_state( except (ValueError, IndexError) as e: log.exception("Error parsing jlap", exc_info=e) # a 'latest' hash that we can't achieve, triggering later error handling - buffer = JLAP([[-1, "", ""], [0, json.dumps({LATEST: "0" * 32}), ""], [1, "", ""]]) + buffer = JLAP( + [[-1, "", ""], [0, json.dumps({LATEST: "0" * 32}), ""], [1, "", ""]] + ) state.set_has_format("jlap", False) state[JLAP_KEY] = jlap_state @@ -376,9 +395,10 @@ def request_url_jlap_state( apply_patches(repodata_json, apply) with timeme("Write changed "), json_path.open("wb") as repodata: - hasher = hash() - HashWriter(repodata, hasher).write(json.dumps(repodata_json).encode("utf-8")) + HashWriter(repodata, hasher).write( + json.dumps(repodata_json).encode("utf-8") + ) # actual hash of serialized json state[ON_DISK_HASH] = hasher.hexdigest() diff --git a/conda/gateways/repodata/lock.py b/conda/gateways/repodata/lock.py index c7ab7bd1a77..30448e4fcf0 100644 --- a/conda/gateways/repodata/lock.py +++ b/conda/gateways/repodata/lock.py @@ -59,7 +59,9 @@ def __enter__(self): for attempt in range(LOCK_ATTEMPTS): try: # msvcrt locking does something similar - fcntl.lockf(self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 1, LOCK_BYTE) + fcntl.lockf( + self.fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 1, LOCK_BYTE + ) break except OSError: if attempt > LOCK_ATTEMPTS - 2: diff --git a/conda/gateways/subprocess.py b/conda/gateways/subprocess.py index 33406558536..2f43c5f8f71 100644 --- a/conda/gateways/subprocess.py +++ b/conda/gateways/subprocess.py @@ -1,28 +1,30 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import os +import sys from collections import namedtuple from logging import getLogger -import os from os.path import abspath +from subprocess import PIPE, CalledProcessError, Popen + from conda.auxlib.compat import shlex_split_unicode -import sys -from subprocess import CalledProcessError, PIPE, Popen -from ..utils import wrap_subprocess_call -from .logging import TRACE from .. 
import ACTIVE_SUBPROCESSES from ..auxlib.ish import dals +from ..base.context import context from ..common.compat import encode_arguments, encode_environment, isiterable from ..gateways.disk.delete import rm_rf -from ..base.context import context +from ..utils import wrap_subprocess_call +from .logging import TRACE log = getLogger(__name__) -Response = namedtuple('Response', ('stdout', 'stderr', 'rc')) +Response = namedtuple("Response", ("stdout", "stderr", "rc")) def _format_output(command_str, cwd, rc, stdout, stderr): - return dals(""" + return ( + dals( + """ $ %s ==> cwd: %s <== ==> exit code: %d <== @@ -30,7 +32,10 @@ def _format_output(command_str, cwd, rc, stdout, stderr): %s ==> stderr <== %s - """) % (command_str, cwd, rc, stdout, stderr) + """ + ) + % (command_str, cwd, rc, stdout, stderr) + ) def any_subprocess(args, prefix, env=None, cwd=None): @@ -51,20 +56,24 @@ def any_subprocess(args, prefix, env=None, cwd=None): ) stdout, stderr = process.communicate() if script_caller is not None: - if 'CONDA_TEST_SAVE_TEMPS' not in os.environ: + if "CONDA_TEST_SAVE_TEMPS" not in os.environ: rm_rf(script_caller) else: - log.warning('CONDA_TEST_SAVE_TEMPS :: retaining pip run_script {}'.format( - script_caller)) - if hasattr(stdout, 'decode'): - stdout = stdout.decode('utf-8', errors='replace') - if hasattr(stderr, 'decode'): - stderr = stderr.decode('utf-8', errors='replace') + log.warning( + "CONDA_TEST_SAVE_TEMPS :: retaining pip run_script {}".format( + script_caller + ) + ) + if hasattr(stdout, "decode"): + stdout = stdout.decode("utf-8", errors="replace") + if hasattr(stderr, "decode"): + stderr = stderr.decode("utf-8", errors="replace") return stdout, stderr, process.returncode -def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=True, - capture_output=True): +def subprocess_call( + command, env=None, path=None, stdin=None, raise_on_error=True, capture_output=True +): """This utility function should be preferred for all conda subprocessing. It handles multiple tricky details. 
""" @@ -72,7 +81,7 @@ def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=Tru cwd = sys.prefix if path is None else abspath(path) if not isiterable(command): command = shlex_split_unicode(command) - command_str = command if isinstance(command, str) else ' '.join(command) + command_str = command if isinstance(command, str) else " ".join(command) log.debug("executing>> %s", command_str) pipe = None @@ -97,9 +106,9 @@ def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=Tru # decode output, if not PIPE, stdout/stderr will be None stdout, stderr = process.communicate(input=stdin) if hasattr(stdout, "decode"): - stdout = stdout.decode('utf-8', errors='replace') + stdout = stdout.decode("utf-8", errors="replace") if hasattr(stderr, "decode"): - stderr = stderr.decode('utf-8', errors='replace') + stderr = stderr.decode("utf-8", errors="replace") rc = process.returncode ACTIVE_SUBPROCESSES.remove(process) @@ -107,8 +116,7 @@ def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=Tru formatted_output = _format_output(command_str, cwd, rc, stdout, stderr) if raise_on_error and rc != 0: log.info(formatted_output) - raise CalledProcessError(rc, command, - output=formatted_output) + raise CalledProcessError(rc, command, output=formatted_output) if log.isEnabledFor(TRACE): log.trace(formatted_output) @@ -118,20 +126,29 @@ def subprocess_call(command, env=None, path=None, stdin=None, raise_on_error=Tru def _subprocess_clean_env(env, clean_python=True, clean_conda=True): dels = [] if clean_python: - dels.extend(('PYTHONPATH', 'PYTHONHOME')) + dels.extend(("PYTHONPATH", "PYTHONHOME")) if clean_conda: - dels.extend(('CONDA_ROOT', 'CONDA_PROMPT_MODIFIER', - 'CONDA_EXE', 'CONDA_DEFAULT_ENV')) + dels.extend( + ("CONDA_ROOT", "CONDA_PROMPT_MODIFIER", "CONDA_EXE", "CONDA_DEFAULT_ENV") + ) for key in dels: if key in env: del env[key] -def subprocess_call_with_clean_env(command, path=None, stdin=None, raise_on_error=True, - clean_python=True, clean_conda=True): +def subprocess_call_with_clean_env( + command, + path=None, + stdin=None, + raise_on_error=True, + clean_python=True, + clean_conda=True, +): # Any of these env vars are likely to mess the whole thing up. # This has been seen to be the case with PYTHONPATH. env = os.environ.copy() _subprocess_clean_env(env, clean_python, clean_conda) # env['CONDA_DLL_SEARCH_MODIFICATION_ENABLE'] = '1' - return subprocess_call(command, env=env, path=path, stdin=stdin, raise_on_error=raise_on_error) + return subprocess_call( + command, env=env, path=path, stdin=stdin, raise_on_error=raise_on_error + ) diff --git a/conda/history.py b/conda/history.py index 31e181acfdc..e5ad41acb10 100644 --- a/conda/history.py +++ b/conda/history.py @@ -1,23 +1,21 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from ast import literal_eval import codecs -from errno import EACCES, EPERM, EROFS import logging -from operator import itemgetter import os -from os.path import isdir, isfile, join import re import sys -from textwrap import dedent import time import warnings +from ast import literal_eval +from errno import EACCES, EPERM, EROFS +from itertools import islice +from operator import itemgetter +from os.path import isdir, isfile, join +from textwrap import dedent from conda.common.iterators import groupby_to_dict as groupby -from itertools import islice - from . 
import __version__ as CONDA_VERSION from .auxlib.ish import dals from .base.constants import DEFAULTS_CHANNEL_NAME @@ -28,8 +26,8 @@ from .exceptions import CondaHistoryError, NotWritableError from .gateways.disk.update import touch from .models.dist import dist_str_to_quad -from .models.version import VersionOrder, version_relation_re from .models.match_spec import MatchSpec +from .models.version import VersionOrder, version_relation_re log = logging.getLogger(__name__) @@ -45,7 +43,7 @@ def write_head(fo): def is_diff(content): - return any(s.startswith(('-', '+')) for s in content) + return any(s.startswith(("-", "+")) for s in content) def pretty_diff(diff): @@ -55,10 +53,10 @@ def pretty_diff(diff): fn = s[1:] name, version, _, channel = dist_str_to_quad(fn) if channel != DEFAULTS_CHANNEL_NAME: - version += ' (%s)' % channel - if s.startswith('-'): + version += " (%s)" % channel + if s.startswith("-"): removed[name.lower()] = version - elif s.startswith('+'): + elif s.startswith("+"): added[name.lower()] = version changed = set(added) & set(removed) for name in sorted(changed): @@ -77,15 +75,14 @@ def pretty_content(content): class History: - - com_pat = re.compile(r'#\s*cmd:\s*(.+)') - spec_pat = re.compile(r'#\s*(\w+)\s*specs:\s*(.+)?') - conda_v_pat = re.compile(r'#\s*conda version:\s*(.+)') + com_pat = re.compile(r"#\s*cmd:\s*(.+)") + spec_pat = re.compile(r"#\s*(\w+)\s*specs:\s*(.+)?") + conda_v_pat = re.compile(r"#\s*conda version:\s*(.+)") def __init__(self, prefix): self.prefix = prefix - self.meta_dir = join(prefix, 'conda-meta') - self.path = join(self.meta_dir, 'history') + self.meta_dir = join(prefix, "conda-meta") + self.path = join(self.meta_dir, "history") def __enter__(self): self.init_log_file() @@ -127,7 +124,7 @@ def parse(self): res = [] if not isfile(self.path): return res - sep_pat = re.compile(r'==>\s*(.+?)\s*<==') + sep_pat = re.compile(r"==>\s*(.+?)\s*<==") with open(self.path) as f: lines = f.read().splitlines() for line in lines: @@ -137,7 +134,7 @@ def parse(self): m = sep_pat.match(line) if m: res.append((m.group(1), set(), [])) - elif line.startswith('#'): + elif line.startswith("#"): res[-1][2].append(line) elif len(res) > 0: res[-1][1].add(line) @@ -154,11 +151,11 @@ def _parse_old_format_specs_string(specs_string): - "python>=3.5.1,jupyter >=1.0.0,<2.0,matplotlib >=1.5.1,<2.0" """ specs = [] - for spec in specs_string.split(','): + for spec in specs_string.split(","): # If the spec starts with a version qualifier, then it actually belongs to the # previous spec. But don't try to join if there was no previous spec. 
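# (worked example: "jupyter >=1.0.0,<2.0,matplotlib >=1.5.1" splits on ","
# into ["jupyter >=1.0.0", "<2.0", "matplotlib >=1.5.1"]; "<2.0" starts
# with a version relation, so it is glued back onto the previous entry,
# yielding ["jupyter >=1.0.0,<2.0", "matplotlib >=1.5.1"])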
if version_relation_re.match(spec) and specs: - specs[-1] = ','.join([specs[-1], spec]) + specs[-1] = ",".join([specs[-1], spec]) else: specs.append(spec) return specs @@ -179,33 +176,33 @@ def _parse_comment_line(cls, line): m = cls.com_pat.match(line) if m: argv = m.group(1).split() - if argv[0].endswith('conda'): - argv[0] = 'conda' - item['cmd'] = argv + if argv[0].endswith("conda"): + argv[0] = "conda" + item["cmd"] = argv m = cls.conda_v_pat.match(line) if m: - item['conda_version'] = m.group(1) + item["conda_version"] = m.group(1) m = cls.spec_pat.match(line) if m: action, specs_string = m.groups() specs_string = specs_string or "" - item['action'] = action + item["action"] = action - if specs_string.startswith('['): + if specs_string.startswith("["): specs = literal_eval(specs_string) - elif '[' not in specs_string: + elif "[" not in specs_string: specs = History._parse_old_format_specs_string(specs_string) - specs = [spec for spec in specs if spec and not spec.endswith('@')] + specs = [spec for spec in specs if spec and not spec.endswith("@")] - if specs and action in ('update', 'install', 'create'): - item['update_specs'] = item['specs'] = specs - elif specs and action in ('remove', 'uninstall'): - item['remove_specs'] = item['specs'] = specs - elif specs and action in ('neutered', ): - item['neutered_specs'] = item['specs'] = specs + if specs and action in ("update", "install", "create"): + item["update_specs"] = item["specs"] = specs + elif specs and action in ("remove", "uninstall"): + item["remove_specs"] = item["specs"] = specs + elif specs and action in ("neutered",): + item["neutered_specs"] = item["specs"] = specs return item @@ -220,48 +217,63 @@ def get_user_requests(self): """ res = [] for dt, unused_cont, comments in self.parse(): - item = {'date': dt} + item = {"date": dt} for line in comments: comment_items = self._parse_comment_line(line) item.update(comment_items) - if 'cmd' in item: + if "cmd" in item: res.append(item) dists = groupby(itemgetter(0), unused_cont) - item['unlink_dists'] = dists.get('-', ()) - item['link_dists'] = dists.get('+', ()) + item["unlink_dists"] = dists.get("-", ()) + item["link_dists"] = dists.get("+", ()) - conda_versions_from_history = tuple(x['conda_version'] for x in res - if 'conda_version' in x) + conda_versions_from_history = tuple( + x["conda_version"] for x in res if "conda_version" in x + ) if conda_versions_from_history and not context.allow_conda_downgrades: - minimum_conda_version = sorted(conda_versions_from_history, key=VersionOrder)[-1] + minimum_conda_version = sorted( + conda_versions_from_history, key=VersionOrder + )[-1] minimum_major_minor = ".".join(islice(minimum_conda_version.split("."), 2)) current_major_minor = ".".join(islice(CONDA_VERSION.split("."), 2)) if VersionOrder(current_major_minor) < VersionOrder(minimum_major_minor): - message = dals(""" + message = ( + dals( + """ This environment has previously been operated on by a conda version that's newer than the conda currently being used. A newer version of conda is required. 
target environment location: %(target_prefix)s current conda version: %(conda_version)s minimum conda version: %(minimum_version)s - """) % { - "target_prefix": self.prefix, - "conda_version": CONDA_VERSION, - "minimum_version": minimum_major_minor, - } + """ + ) + % { + "target_prefix": self.prefix, + "conda_version": CONDA_VERSION, + "minimum_version": minimum_major_minor, + } + ) if not paths_equal(self.prefix, context.root_prefix): - message += dedent(""" + message += ( + dedent( + """ Update conda and try again. $ conda install -p "%(base_prefix)s" "conda>=%(minimum_version)s" - """) % { - "base_prefix": context.root_prefix, - "minimum_version": minimum_major_minor, - } - message += dedent(""" + """ + ) + % { + "base_prefix": context.root_prefix, + "minimum_version": minimum_major_minor, + } + ) + message += dedent( + """ To work around this restriction, one can also set the config parameter 'allow_conda_downgrades' to False at their own risk. - """) + """ + ) # TODO: we need to rethink this. It's fine as a warning to try to get users # to avoid breaking their system. However, right now it is preventing @@ -274,13 +286,15 @@ def get_requested_specs_map(self): # keys are package names and values are specs spec_map = {} for request in self.get_user_requests(): - remove_specs = (MatchSpec(spec) for spec in request.get('remove_specs', ())) + remove_specs = (MatchSpec(spec) for spec in request.get("remove_specs", ())) for spec in remove_specs: spec_map.pop(spec.name, None) - update_specs = (MatchSpec(spec) for spec in request.get('update_specs', ())) + update_specs = (MatchSpec(spec) for spec in request.get("update_specs", ())) spec_map.update((s.name, s) for s in update_specs) # here is where the neutering takes effect, overriding past values - neutered_specs = (MatchSpec(spec) for spec in request.get('neutered_specs', ())) + neutered_specs = ( + MatchSpec(spec) for spec in request.get("neutered_specs", ()) + ) spec_map.update((s.name, s) for s in neutered_specs) # Conda hasn't always been good about recording when specs have been removed from @@ -300,12 +314,12 @@ def construct_states(self): cur = cont else: for s in cont: - if s.startswith('-'): + if s.startswith("-"): cur.discard(s[1:]) - elif s.startswith('+'): + elif s.startswith("+"): cur.add(s[1:]) else: - raise CondaHistoryError('Did not expect: %s' % s) + raise CondaHistoryError("Did not expect: %s" % s) res.append((dt, cur.copy())) return res @@ -325,9 +339,9 @@ def get_state(self, rev=-1): def print_log(self): for i, (date, content, unused_com) in enumerate(self.parse()): - print('%s (rev %d)' % (date, i)) + print("%s (rev %d)" % (date, i)) for line in pretty_content(content): - print(' %s' % line) + print(" %s" % line) print() def object_log(self): @@ -336,21 +350,21 @@ def object_log(self): # Based on Mateusz's code; provides more details about the # history event event = { - 'date': date, - 'rev': i, - 'install': [], - 'remove': [], - 'upgrade': [], - 'downgrade': [] + "date": date, + "rev": i, + "install": [], + "remove": [], + "upgrade": [], + "downgrade": [], } added = {} removed = {} if is_diff(content): for pkg in content: name, version, build, channel = dist_str_to_quad(pkg[1:]) - if pkg.startswith('+'): + if pkg.startswith("+"): added[name.lower()] = (version, build, channel) - elif pkg.startswith('-'): + elif pkg.startswith("-"): removed[name.lower()] = (version, build, channel) changed = set(added) & set(removed) @@ -358,42 +372,42 @@ def object_log(self): old = removed[name] new = added[name] details = { - 'old': 
'-'.join((name,) + old), - 'new': '-'.join((name,) + new) + "old": "-".join((name,) + old), + "new": "-".join((name,) + new), } if new > old: - event['upgrade'].append(details) + event["upgrade"].append(details) else: - event['downgrade'].append(details) + event["downgrade"].append(details) for name in sorted(set(removed) - changed): - event['remove'].append('-'.join((name,) + removed[name])) + event["remove"].append("-".join((name,) + removed[name])) for name in sorted(set(added) - changed): - event['install'].append('-'.join((name,) + added[name])) + event["install"].append("-".join((name,) + added[name])) else: for pkg in sorted(content): - event['install'].append(pkg) + event["install"].append(pkg) result.append(event) return result def write_changes(self, last_state, current_state): if not isdir(self.meta_dir): os.makedirs(self.meta_dir) - with codecs.open(self.path, mode='ab', encoding='utf-8') as fo: + with codecs.open(self.path, mode="ab", encoding="utf-8") as fo: write_head(fo) for fn in sorted(last_state - current_state): - fo.write('-%s\n' % fn) + fo.write("-%s\n" % fn) for fn in sorted(current_state - last_state): - fo.write('+%s\n' % fn) + fo.write("+%s\n" % fn) def write_specs(self, remove_specs=(), update_specs=(), neutered_specs=()): remove_specs = [str(MatchSpec(s)) for s in remove_specs] update_specs = [str(MatchSpec(s)) for s in update_specs] neutered_specs = [str(MatchSpec(s)) for s in neutered_specs] if any((update_specs, remove_specs, neutered_specs)): - with codecs.open(self.path, mode='ab', encoding='utf-8') as fh: + with codecs.open(self.path, mode="ab", encoding="utf-8") as fh: if remove_specs: fh.write("# remove specs: %s\n" % remove_specs) if update_specs: @@ -402,8 +416,9 @@ def write_specs(self, remove_specs=(), update_specs=(), neutered_specs=()): fh.write("# neutered specs: %s\n" % neutered_specs) -if __name__ == '__main__': +if __name__ == "__main__": from pprint import pprint + # Don't use in context manager mode---it augments the history every time h = History(sys.prefix) pprint(h.get_user_requests()) diff --git a/conda/instructions.py b/conda/instructions.py index 3c832ae1170..99ec788c303 100644 --- a/conda/instructions.py +++ b/conda/instructions.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger from os.path import isfile, join @@ -13,20 +12,20 @@ log = getLogger(__name__) # op codes -CHECK_FETCH = 'CHECK_FETCH' -FETCH = 'FETCH' -CHECK_EXTRACT = 'CHECK_EXTRACT' -EXTRACT = 'EXTRACT' -RM_EXTRACTED = 'RM_EXTRACTED' -RM_FETCHED = 'RM_FETCHED' -PREFIX = 'PREFIX' -PRINT = 'PRINT' -PROGRESS = 'PROGRESS' -SYMLINK_CONDA = 'SYMLINK_CONDA' -UNLINK = 'UNLINK' -LINK = 'LINK' -UNLINKLINKTRANSACTION = 'UNLINKLINKTRANSACTION' -PROGRESSIVEFETCHEXTRACT = 'PROGRESSIVEFETCHEXTRACT' +CHECK_FETCH = "CHECK_FETCH" +FETCH = "FETCH" +CHECK_EXTRACT = "CHECK_EXTRACT" +EXTRACT = "EXTRACT" +RM_EXTRACTED = "RM_EXTRACTED" +RM_FETCHED = "RM_FETCHED" +PREFIX = "PREFIX" +PRINT = "PRINT" +PROGRESS = "PROGRESS" +SYMLINK_CONDA = "SYMLINK_CONDA" +UNLINK = "UNLINK" +LINK = "LINK" +UNLINKLINKTRANSACTION = "UNLINKLINKTRANSACTION" +PROGRESSIVEFETCHEXTRACT = "PROGRESSIVEFETCHEXTRACT" PROGRESS_COMMANDS = {EXTRACT, RM_EXTRACTED} @@ -45,13 +44,13 @@ @deprecated("23.9", "24.3", addendum="Unused.") def PREFIX_CMD(state, prefix): - state['prefix'] = prefix + state["prefix"] = prefix def PRINT_CMD(state, arg): # pragma: no cover - if arg.startswith(('Unlinking packages', 'Linking packages')): + if arg.startswith(("Unlinking 
packages", "Linking packages")): return - getLogger('conda.stdout.verbose').info(arg) + getLogger("conda.stdout.verbose").info(arg) def FETCH_CMD(state, package_cache_entry): @@ -99,10 +98,11 @@ def check_files_in_package(source_dir, files): } -OP_ORDER = (RM_FETCHED, - FETCH, - RM_EXTRACTED, - EXTRACT, - UNLINK, - LINK, - ) +OP_ORDER = ( + RM_FETCHED, + FETCH, + RM_EXTRACTED, + EXTRACT, + UNLINK, + LINK, +) diff --git a/conda/lock.py b/conda/lock.py index 17d1b101544..d84c8d3b64d 100644 --- a/conda/lock.py +++ b/conda/lock.py @@ -11,18 +11,18 @@ We don't raise an error if the lock is named with the current PID """ -from glob import glob import logging import os -from os.path import abspath, basename, dirname, isdir, join import time +from glob import glob +from os.path import abspath, basename, dirname, isdir, join from .deprecations import deprecated from .exceptions import LockError deprecated.module("23.3", "23.9", addendum="Use `filelock` instead.") -LOCK_EXTENSION = 'conda_lock' +LOCK_EXTENSION = "conda_lock" # Keep the string "LOCKERROR" in this string so that external # programs can look for it. @@ -34,21 +34,23 @@ """ log = logging.getLogger(__name__) -stdoutlog = logging.getLogger('conda.stdoutlog') +stdoutlog = logging.getLogger("conda.stdoutlog") + def touch(file_name, times=None): - """ Touch function like touch in Unix shell + """Touch function like touch in Unix shell :param file_name: the name of file :param times: the access and modified time Examples: touch("hello_world.py") """ try: - with open(file_name, 'a'): + with open(file_name, "a"): os.utime(file_name, times) except OSError as e: # pragma: no cover log.warn( - "Failed to create lock, do not run conda in parallel processes [errno %d]", e.errno + "Failed to create lock, do not run conda in parallel processes [errno %d]", + e.errno, ) @@ -58,9 +60,9 @@ class FileLock: :param path_to_lock: the path to be locked :param retries: max number of retries """ + def __init__(self, path_to_lock, retries=10): - """ - """ + """ """ self.path_to_lock = abspath(path_to_lock) self.retries = retries self.lock_file_path = f"{self.path_to_lock}.pid{{0}}.{LOCK_EXTENSION}" @@ -75,7 +77,6 @@ def __enter__(self): last_glob_match = None for _ in range(self.retries + 1): - # search, whether there is process already locked on this file glob_result = glob(self.lock_file_glob_str) if glob_result: @@ -94,6 +95,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_value, traceback): from .gateways.disk.delete import rm_rf + rm_rf(self.lock_file_path) @@ -116,13 +118,19 @@ def __init__(self, directory_path, retries=10): # e.g. 
if locking directory `/conda`, lock file will be `/conda/conda.pidXXXX.conda_lock` self.lock_file_glob_str = f"{lock_path_pre}.pid*.{LOCK_EXTENSION}" # make sure '/' exists - assert isdir(dirname(self.directory_path)), f"{self.directory_path} doesn't exist" + assert isdir( + dirname(self.directory_path) + ), f"{self.directory_path} doesn't exist" if not isdir(self.directory_path): try: os.makedirs(self.directory_path) log.debug("forced to create %s", self.directory_path) except OSError as e: # pragma: no cover - log.warn("Failed to create directory %s [errno %d]", self.directory_path, e.errno) + log.warn( + "Failed to create directory %s [errno %d]", + self.directory_path, + e.errno, + ) Locked = DirectoryLock diff --git a/conda/misc.py b/conda/misc.py index 13bf61fe0ad..4e6ec99ee6b 100644 --- a/conda/misc.py +++ b/conda/misc.py @@ -1,16 +1,15 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - # this module contains miscellaneous stuff which eventually could be moved # into other places -from collections import defaultdict import os -from os.path import abspath, dirname, exists, isdir, isfile, join, relpath import re import shutil import sys +from collections import defaultdict +from os.path import abspath, dirname, exists, isdir, isfile, join, relpath from .base.context import context from .common.compat import on_win, open @@ -21,11 +20,11 @@ from .core.package_cache_data import PackageCacheData, ProgressiveFetchExtract from .core.prefix_data import PrefixData from .exceptions import ( + CondaExitZero, DisallowedPackageError, DryRunExit, PackagesNotFoundError, ParseError, - CondaExitZero, ) from .gateways.disk.delete import rm_rf from .gateways.disk.link import islink, readlink, symlink @@ -54,7 +53,9 @@ def conda_installed_files(prefix, exclude_self_build=False): ) -def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None): +def explicit( + specs, prefix, verbose=False, force_extract=True, index_args=None, index=None +): actions = defaultdict(list) actions["PREFIX"] = prefix @@ -104,13 +105,17 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, ) # Assert that every spec has a PackageCacheRecord - specs_with_missing_pcrecs = [str(spec) for spec, pcrec in specs_pcrecs if pcrec is None] + specs_with_missing_pcrecs = [ + str(spec) for spec, pcrec in specs_pcrecs if pcrec is None + ] if specs_with_missing_pcrecs: if len(specs_with_missing_pcrecs) == len(specs_pcrecs): raise AssertionError("No package cache records found") else: missing_precs_list = ", ".join(specs_with_missing_pcrecs) - raise AssertionError(f"Missing package cache records for: {missing_precs_list}") + raise AssertionError( + f"Missing package cache records for: {missing_precs_list}" + ) precs_to_remove = [] prefix_data = PrefixData(prefix) @@ -140,7 +145,7 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, def rel_path(prefix, path, windows_forward_slashes=True): - res = path[len(prefix) + 1:] + res = path[len(prefix) + 1 :] if on_win and windows_forward_slashes: res = res.replace("\\", "/") return res @@ -252,10 +257,17 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): if filter: if not quiet: fh = sys.stderr if context.json else sys.stdout - print("The following packages cannot be cloned out of the root environment:", file=fh) + print( + "The following packages cannot be cloned out of the root environment:", + file=fh, + ) for prec in filter.values(): print(" - " + 
prec.dist_str(), file=fh) - drecs = {prec for prec in PrefixData(prefix1).iter_records() if prec["name"] not in filter} + drecs = { + prec + for prec in PrefixData(prefix1).iter_records() + if prec["name"] not in filter + } else: drecs = {prec for prec in PrefixData(prefix1).iter_records()} @@ -331,6 +343,11 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None): shutil.copystat(src, dst) actions = explicit( - urls, prefix2, verbose=not quiet, index=index, force_extract=False, index_args=index_args + urls, + prefix2, + verbose=not quiet, + index=index, + force_extract=False, + index_args=index_args, ) return actions, untracked_files diff --git a/conda/models/channel.py b/conda/models/channel.py index 74239b924c0..b30a5192d99 100644 --- a/conda/models/channel.py +++ b/conda/models/channel.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from copy import copy from itertools import chain from logging import getLogger @@ -10,13 +9,25 @@ except ImportError: # pragma: no cover from .._vendor.boltons.setutils import IndexedSet -from ..base.constants import DEFAULTS_CHANNEL_NAME, MAX_CHANNEL_PRIORITY, UNKNOWN_CHANNEL -from ..base.context import context, Context +from ..base.constants import ( + DEFAULTS_CHANNEL_NAME, + MAX_CHANNEL_PRIORITY, + UNKNOWN_CHANNEL, +) +from ..base.context import Context, context from ..common.compat import ensure_text_type, isiterable from ..common.path import is_package_file, is_path, win_path_backout -from ..common.url import (Url, has_scheme, is_url, join_url, path_to_url, - split_conda_url_easy_parts, split_platform, split_scheme_auth_token, - urlparse) +from ..common.url import ( + Url, + has_scheme, + is_url, + join_url, + path_to_url, + split_conda_url_easy_parts, + split_platform, + split_scheme_auth_token, + urlparse, +) log = getLogger(__name__) @@ -38,11 +49,13 @@ def __call__(cls, *args, **kwargs): c = Channel._cache_[value] = Channel.from_value(value) return c else: - if 'channels' in kwargs: + if "channels" in kwargs: # presence of 'channels' kwarg indicates MultiChannel - name = kwargs['name'] - channels = tuple(super(ChannelType, cls).__call__(**_kwargs) - for _kwargs in kwargs['channels']) + name = kwargs["name"] + channels = tuple( + super(ChannelType, cls).__call__(**_kwargs) + for _kwargs in kwargs["channels"] + ) return MultiChannel(name, channels) else: return super().__call__(*args, **kwargs) @@ -57,19 +70,28 @@ class Channel(metaclass=ChannelType): channel <> subchannel <> namespace <> package_name """ + _cache_ = {} @staticmethod def _reset_state(): Channel._cache_ = {} - def __init__(self, scheme=None, auth=None, location=None, token=None, name=None, - platform=None, package_filename=None): + def __init__( + self, + scheme=None, + auth=None, + location=None, + token=None, + name=None, + platform=None, + package_filename=None, + ): self.scheme = scheme self.auth = auth self.location = location self.token = token - self.name = name or '' + self.name = name or "" self.platform = platform self.package_filename = package_filename @@ -95,17 +117,17 @@ def from_channel_name(channel_name): @staticmethod def from_value(value): - if value in (None, '', 'None:///', 'None'): + if value in (None, "", "None:///", "None"): return Channel(name=UNKNOWN_CHANNEL) value = ensure_text_type(value) if has_scheme(value): - if value.startswith('file:'): + if value.startswith("file:"): value = win_path_backout(value) return Channel.from_url(value) elif is_path(value): return 
Channel.from_url(path_to_url(value)) elif is_package_file(value): - if value.startswith('file:'): + if value.startswith("file:"): value = win_path_backout(value) return Channel.from_url(value) else: @@ -113,7 +135,9 @@ def from_value(value): # e.g. this would be bad: repo.anaconda.com/pkgs/free _stripped, platform = split_platform(context.known_subdirs, value) if _stripped in context.custom_multichannels: - return MultiChannel(_stripped, context.custom_multichannels[_stripped], platform) + return MultiChannel( + _stripped, context.custom_multichannels[_stripped], platform + ) else: return Channel.from_channel_name(value) @@ -122,20 +146,35 @@ def make_simple_channel(channel_alias, channel_url, name=None): ca = channel_alias test_url, scheme, auth, token = split_scheme_auth_token(channel_url) if name and scheme: - return Channel(scheme=scheme, auth=auth, location=test_url, token=token, - name=name.strip('/')) + return Channel( + scheme=scheme, + auth=auth, + location=test_url, + token=token, + name=name.strip("/"), + ) if scheme: if ca.location and test_url.startswith(ca.location): - location, name = ca.location, test_url.replace(ca.location, '', 1) + location, name = ca.location, test_url.replace(ca.location, "", 1) else: url_parts = urlparse(test_url) location = str(Url(hostname=url_parts.hostname, port=url_parts.port)) - name = url_parts.path or '' - return Channel(scheme=scheme, auth=auth, location=location, token=token, - name=name.strip('/')) + name = url_parts.path or "" + return Channel( + scheme=scheme, + auth=auth, + location=location, + token=token, + name=name.strip("/"), + ) else: - return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token, - name=name and name.strip('/') or channel_url.strip('/')) + return Channel( + scheme=ca.scheme, + auth=ca.auth, + location=ca.location, + token=ca.token, + name=name and name.strip("/") or channel_url.strip("/"), + ) @property def canonical_name(self): @@ -151,7 +190,9 @@ def canonical_name(self): return cn for that_name in context.custom_channels: - if self.name and tokenized_startswith(self.name.split('/'), that_name.split('/')): + if self.name and tokenized_startswith( + self.name.split("/"), that_name.split("/") + ): cn = self.__canonical_name = self.name return cn @@ -173,7 +214,7 @@ def canonical_name(self): ) return cn else: - cn = self.__canonical_name = join_url(self.location, self.name).lstrip('/') + cn = self.__canonical_name = join_url(self.location, self.name).lstrip("/") return cn def urls(self, with_credentials=False, subdirs=None): @@ -187,15 +228,15 @@ def urls(self, with_credentials=False, subdirs=None): base = [self.location] if with_credentials and self.token: - base.extend(['t', self.token]) + base.extend(["t", self.token]) base.append(self.name) base = join_url(*base) def _platforms(): if self.platform: yield self.platform - if self.platform != 'noarch': - yield 'noarch' + if self.platform != "noarch": + yield "noarch" else: yield from subdirs @@ -211,14 +252,16 @@ def url(self, with_credentials=False): base = [self.location] if with_credentials and self.token: - base.extend(['t', self.token]) + base.extend(["t", self.token]) base.append(self.name) if self.platform: base.append(self.platform) if self.package_filename: base.append(self.package_filename) else: - first_non_noarch = next((s for s in context.subdirs if s != 'noarch'), 'noarch') + first_non_noarch = next( + (s for s in context.subdirs if s != "noarch"), "noarch" + ) base.append(first_non_noarch) base = join_url(*base) @@ -236,13 
+279,13 @@ def base_url(self): @property def base_urls(self): - return self.base_url, + return (self.base_url,) @property def subdir_url(self): url = self.url(True) if self.package_filename and url: - url = url.rsplit('/', 1)[0] + url = url.rsplit("/", 1)[0] return url def __str__(self): @@ -253,7 +296,9 @@ def __str__(self): return base def __repr__(self): - return 'Channel("%s")' % (join_url(self.name, self.subdir) if self.subdir else self.name) + return 'Channel("%s")' % ( + join_url(self.name, self.subdir) if self.subdir else self.name + ) def __eq__(self, other): if isinstance(other, Channel): @@ -295,7 +340,6 @@ def dump(self): class MultiChannel(Channel): - def __init__(self, name, channels, platform=None): self.name = name self.location = None @@ -323,7 +367,9 @@ def canonical_name(self): def urls(self, with_credentials=False, subdirs=None): _channels = self._channels - return list(chain.from_iterable(c.urls(with_credentials, subdirs) for c in _channels)) + return list( + chain.from_iterable(c.urls(with_credentials, subdirs) for c in _channels) + ) @property def base_url(self): @@ -337,10 +383,7 @@ def url(self, with_credentials=False): return None def dump(self): - return { - "name": self.name, - "channels": tuple(c.dump() for c in self._channels) - } + return {"name": self.name, "channels": tuple(c.dump() for c in self._channels)} def tokenized_startswith(test_iterable, startswith_iterable): @@ -349,11 +392,15 @@ def tokenized_startswith(test_iterable, startswith_iterable): def tokenized_conda_url_startswith(test_url, startswith_url): test_url, startswith_url = urlparse(test_url), urlparse(startswith_url) - if test_url.hostname != startswith_url.hostname or test_url.port != startswith_url.port: + if ( + test_url.hostname != startswith_url.hostname + or test_url.port != startswith_url.port + ): return False - norm_url_path = lambda url: url.path.strip('/') or '/' - return tokenized_startswith(norm_url_path(test_url).split('/'), - norm_url_path(startswith_url).split('/')) + norm_url_path = lambda url: url.path.strip("/") or "/" + return tokenized_startswith( + norm_url_path(test_url).split("/"), norm_url_path(startswith_url).split("/") + ) def _get_channel_for_name(channel_name): @@ -361,7 +408,7 @@ def _get_channel_for_name_helper(name): if name in context.custom_channels: return context.custom_channels[name] else: - test_name = name.rsplit('/', 1)[0] # progressively strip off path segments + test_name = name.rsplit("/", 1)[0] # progressively strip off path segments if test_name == name: return None return _get_channel_for_name_helper(test_name) @@ -379,61 +426,88 @@ def _get_channel_for_name_helper(name): return channel else: ca = context.channel_alias - return Channel(scheme=ca.scheme, auth=ca.auth, location=ca.location, token=ca.token, - name=_stripped, platform=platform) + return Channel( + scheme=ca.scheme, + auth=ca.auth, + location=ca.location, + token=ca.token, + name=_stripped, + platform=platform, + ) def _read_channel_configuration(scheme, host, port, path): # return location, name, scheme, auth, token - path = path and path.rstrip('/') + path = path and path.rstrip("/") test_url = str(Url(hostname=host, port=port, path=path)) # Step 1. No path given; channel name is None if not path: - return str(Url(hostname=host, port=port)).rstrip("/"), None, scheme or None, None, None + return ( + str(Url(hostname=host, port=port)).rstrip("/"), + None, + scheme or None, + None, + None, + ) # Step 2. 
migrated_custom_channels matches - for name, location in sorted(context.migrated_custom_channels.items(), reverse=True, - key=lambda x: len(x[0])): + for name, location in sorted( + context.migrated_custom_channels.items(), reverse=True, key=lambda x: len(x[0]) + ): location, _scheme, _auth, _token = split_scheme_auth_token(location) if tokenized_conda_url_startswith(test_url, join_url(location, name)): # translate location to new location, with new credentials - subname = test_url.replace(join_url(location, name), '', 1).strip('/') + subname = test_url.replace(join_url(location, name), "", 1).strip("/") channel_name = join_url(name, subname) channel = _get_channel_for_name(channel_name) - return channel.location, channel_name, channel.scheme, channel.auth, channel.token + return ( + channel.location, + channel_name, + channel.scheme, + channel.auth, + channel.token, + ) # Step 3. migrated_channel_aliases matches for migrated_alias in context.migrated_channel_aliases: if test_url.startswith(migrated_alias.location): - name = test_url.replace(migrated_alias.location, '', 1).strip('/') + name = test_url.replace(migrated_alias.location, "", 1).strip("/") ca = context.channel_alias return ca.location, name, ca.scheme, ca.auth, ca.token # Step 4. custom_channels matches - for name, channel in sorted(context.custom_channels.items(), reverse=True, - key=lambda x: len(x[0])): + for name, channel in sorted( + context.custom_channels.items(), reverse=True, key=lambda x: len(x[0]) + ): that_test_url = join_url(channel.location, channel.name) - if tokenized_startswith(test_url.split('/'), that_test_url.split('/')): - subname = test_url.replace(that_test_url, '', 1).strip('/') - return (channel.location, join_url(channel.name, subname), scheme, - channel.auth, channel.token) + if tokenized_startswith(test_url.split("/"), that_test_url.split("/")): + subname = test_url.replace(that_test_url, "", 1).strip("/") + return ( + channel.location, + join_url(channel.name, subname), + scheme, + channel.auth, + channel.token, + ) # Step 5. channel_alias match ca = context.channel_alias - if ca.location and tokenized_startswith(test_url.split('/'), ca.location.split('/')): - name = test_url.replace(ca.location, '', 1).strip('/') or None + if ca.location and tokenized_startswith( + test_url.split("/"), ca.location.split("/") + ): + name = test_url.replace(ca.location, "", 1).strip("/") or None return ca.location, name, scheme, ca.auth, ca.token # Step 6. not-otherwise-specified file://-type urls if host is None: # this should probably only happen with a file:// type url assert port is None - location, name = test_url.rsplit('/', 1) + location, name = test_url.rsplit("/", 1) if not location: - location = '/' - _scheme, _auth, _token = 'file', None, None + location = "/" + _scheme, _auth, _token = "file", None, None return location, name, _scheme, _auth, _token # Step 7. 
fall through to host:port as channel_location and path as channel_name @@ -453,29 +527,45 @@ def _read_channel_configuration(scheme, host, port, path): def parse_conda_channel_url(url): - (scheme, auth, token, platform, package_filename, - host, port, path, query) = split_conda_url_easy_parts(context.known_subdirs, url) + ( + scheme, + auth, + token, + platform, + package_filename, + host, + port, + path, + query, + ) = split_conda_url_easy_parts(context.known_subdirs, url) # recombine host, port, path to get a channel_name and channel_location - (channel_location, channel_name, configured_scheme, configured_auth, - configured_token) = _read_channel_configuration(scheme, host, port, path) + ( + channel_location, + channel_name, + configured_scheme, + configured_auth, + configured_token, + ) = _read_channel_configuration(scheme, host, port, path) # if we came out with no channel_location or channel_name, we need to figure it out # from host, port, path assert channel_location is not None or channel_name is not None - return Channel(configured_scheme or 'https', - auth or configured_auth, - channel_location, - token or configured_token, - channel_name, - platform, - package_filename) + return Channel( + configured_scheme or "https", + auth or configured_auth, + channel_location, + token or configured_token, + channel_name, + platform, + package_filename, + ) # backward compatibility for conda-build def get_conda_build_local_url(): - return context.local_build_root, + return (context.local_build_root,) def prioritize_channels(channels, with_credentials=True, subdirs=None): @@ -493,7 +583,9 @@ def prioritize_channels(channels, with_credentials=True, subdirs=None): for url in channel.urls(with_credentials, subdirs): if url in result: continue - result[url] = channel.canonical_name, min(priority_counter, MAX_CHANNEL_PRIORITY - 1) + result[url] = channel.canonical_name, min( + priority_counter, MAX_CHANNEL_PRIORITY - 1 + ) return result @@ -506,7 +598,7 @@ def all_channel_urls(channels, subdirs=None, with_credentials=True): def offline_keep(url): - return not context.offline or not is_url(url) or url.startswith('file:/') + return not context.offline or not is_url(url) or url.startswith("file:/") def get_channel_objs(ctx: Context): diff --git a/conda/models/dist.py b/conda/models/dist.py index 3fc9f524166..1f3e8a167b6 100644 --- a/conda/models/dist.py +++ b/conda/models/dist.py @@ -1,31 +1,35 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import re from collections import namedtuple from logging import getLogger -import re -from .channel import Channel -from .package_info import PackageInfo -from .records import PackageRecord from .. 
import CondaError from ..auxlib.entity import Entity, EntityType, IntegerField, StringField -from ..base.constants import CONDA_PACKAGE_EXTENSIONS, DEFAULTS_CHANNEL_NAME, UNKNOWN_CHANNEL +from ..base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + DEFAULTS_CHANNEL_NAME, + UNKNOWN_CHANNEL, +) from ..base.context import context from ..common.compat import ensure_text_type from ..common.constants import NULL from ..common.url import has_platform, is_url, join_url +from .channel import Channel +from .package_info import PackageInfo +from .records import PackageRecord log = getLogger(__name__) -DistDetails = namedtuple('DistDetails', ('name', 'version', 'build_string', 'build_number', - 'dist_name', 'fmt')) +DistDetails = namedtuple( + "DistDetails", + ("name", "version", "build_string", "build_number", "dist_name", "fmt"), +) IndexRecord = PackageRecord # for conda-build backward compat class DistType(EntityType): - def __call__(cls, *args, **kwargs): if len(args) == 1 and not kwargs: value = args[0] @@ -34,12 +38,16 @@ def __call__(cls, *args, **kwargs): elif isinstance(value, Dist): dist = value elif isinstance(value, PackageRecord): - dist = Dist.from_string(value.fn, channel_override=value.channel.canonical_name) - elif hasattr(value, 'dist') and isinstance(value.dist, Dist): + dist = Dist.from_string( + value.fn, channel_override=value.channel.canonical_name + ) + elif hasattr(value, "dist") and isinstance(value.dist, Dist): dist = value.dist elif isinstance(value, PackageInfo): - dist = Dist.from_string(value.repodata_record.fn, - channel_override=value.channel.canonical_name) + dist = Dist.from_string( + value.repodata_record.fn, + channel_override=value.channel.canonical_name, + ) elif isinstance(value, Channel): dist = Dist.from_url(value.url()) else: @@ -53,13 +61,13 @@ def __call__(cls, *args, **kwargs): def strip_extension(original_dist): for ext in CONDA_PACKAGE_EXTENSIONS: if original_dist.endswith(ext): - original_dist = original_dist[:-len(ext)] + original_dist = original_dist[: -len(ext)] return original_dist def split_extension(original_dist): stripped = strip_extension(original_dist) - return stripped, original_dist[len(stripped):] + return stripped, original_dist[len(stripped) :] class Dist(Entity, metaclass=DistType): @@ -131,7 +139,7 @@ def pair(self): @property def quad(self): # returns: name, version, build_string, channel - parts = self.dist_name.rsplit('-', 2) + ['', ''] + parts = self.dist_name.rsplit("-", 2) + ["", ""] return parts[0], parts[1], parts[2], self.channel or DEFAULTS_CHANNEL_NAME def __str__(self): @@ -139,7 +147,7 @@ def __str__(self): @property def is_feature_package(self): - return self.dist_name.endswith('@') + return self.dist_name.endswith("@") @property def is_channel(self): @@ -152,11 +160,12 @@ def to_filename(self, extension=None): return self.dist_name + self.fmt def to_matchspec(self): - return ' '.join(self.quad[:3]) + return " ".join(self.quad[:3]) def to_match_spec(self): from .match_spec import MatchSpec - base = '='.join(self.quad[:3]) + + base = "=".join(self.quad[:3]) return MatchSpec(f"{self.channel}::{base}" if self.channel else base) @classmethod @@ -166,18 +175,21 @@ def from_string(cls, string, channel_override=NULL): if is_url(string) and channel_override == NULL: return cls.from_url(string) - if string.endswith('@'): - return cls(channel='@', - name=string, - version="", - build_string="", - build_number=0, - dist_name=string) - - REGEX_STR = (r'(?:([^\s\[\]]+)::)?' 
# optional channel - r'([^\s\[\]]+)' # 3.x dist - r'(?:\[([a-zA-Z0-9_-]+)\])?' # with_features_depends - ) + if string.endswith("@"): + return cls( + channel="@", + name=string, + version="", + build_string="", + build_number=0, + dist_name=string, + ) + + REGEX_STR = ( + r"(?:([^\s\[\]]+)::)?" # optional channel + r"([^\s\[\]]+)" # 3.x dist + r"(?:\[([a-zA-Z0-9_-]+)\])?" # with_features_depends + ) channel, original_dist, w_f_d = re.search(REGEX_STR, string).groups() original_dist, fmt = split_extension(original_dist) @@ -189,13 +201,15 @@ def from_string(cls, string, channel_override=NULL): # enforce dist format dist_details = cls.parse_dist_name(original_dist) - return cls(channel=channel, - name=dist_details.name, - version=dist_details.version, - build_string=dist_details.build_string, - build_number=dist_details.build_number, - dist_name=original_dist, - fmt=fmt) + return cls( + channel=channel, + name=dist_details.name, + version=dist_details.version, + build_string=dist_details.build_string, + build_number=dist_details.build_number, + dist_name=original_dist, + fmt=fmt, + ) @staticmethod def parse_dist_name(string): @@ -205,61 +219,75 @@ def parse_dist_name(string): no_fmt_string, fmt = split_extension(string) # remove any directory or channel information - if '::' in no_fmt_string: - dist_name = no_fmt_string.rsplit('::', 1)[-1] + if "::" in no_fmt_string: + dist_name = no_fmt_string.rsplit("::", 1)[-1] else: - dist_name = no_fmt_string.rsplit('/', 1)[-1] + dist_name = no_fmt_string.rsplit("/", 1)[-1] - parts = dist_name.rsplit('-', 2) + parts = dist_name.rsplit("-", 2) name = parts[0] version = parts[1] - build_string = parts[2] if len(parts) >= 3 else '' - build_number_as_string = ''.join(filter(lambda x: x.isdigit(), - (build_string.rsplit('_')[-1] - if build_string else '0'))) + build_string = parts[2] if len(parts) >= 3 else "" + build_number_as_string = "".join( + filter( + lambda x: x.isdigit(), + (build_string.rsplit("_")[-1] if build_string else "0"), + ) + ) build_number = int(build_number_as_string) if build_number_as_string else 0 - return DistDetails(name, version, build_string, build_number, dist_name, fmt) + return DistDetails( + name, version, build_string, build_number, dist_name, fmt + ) except: - raise CondaError("dist_name is not a valid conda package: %s" % original_string) + raise CondaError( + "dist_name is not a valid conda package: %s" % original_string + ) @classmethod def from_url(cls, url): assert is_url(url), url - if not any(url.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) and '::' not in url: + if ( + not any(url.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS) + and "::" not in url + ): raise CondaError("url '%s' is not a conda package" % url) dist_details = cls.parse_dist_name(url) - if '::' in url: - url_no_tarball = url.rsplit('::', 1)[0] + if "::" in url: + url_no_tarball = url.rsplit("::", 1)[0] platform = context.subdir - base_url = url_no_tarball.split('::')[0] + base_url = url_no_tarball.split("::")[0] channel = str(Channel(base_url)) else: - url_no_tarball = url.rsplit('/', 1)[0] + url_no_tarball = url.rsplit("/", 1)[0] platform = has_platform(url_no_tarball, context.known_subdirs) - base_url = url_no_tarball.rsplit('/', 1)[0] if platform else url_no_tarball + base_url = url_no_tarball.rsplit("/", 1)[0] if platform else url_no_tarball channel = Channel(base_url).canonical_name if platform else UNKNOWN_CHANNEL - return cls(channel=channel, - name=dist_details.name, - version=dist_details.version, - 
build_string=dist_details.build_string, - build_number=dist_details.build_number, - dist_name=dist_details.dist_name, - base_url=base_url, - platform=platform, - fmt=dist_details.fmt) + return cls( + channel=channel, + name=dist_details.name, + version=dist_details.version, + build_string=dist_details.build_string, + build_number=dist_details.build_number, + dist_name=dist_details.dist_name, + base_url=base_url, + platform=platform, + fmt=dist_details.fmt, + ) def to_url(self): if not self.base_url: return None filename = self.dist_name + self.fmt - return (join_url(self.base_url, self.platform, filename) - if self.platform - else join_url(self.base_url, filename)) + return ( + join_url(self.base_url, self.platform, filename) + if self.platform + else join_url(self.base_url, filename) + ) def __key__(self): return self.channel, self.dist_name @@ -294,11 +322,11 @@ def __ne__(self, other): # ############ conda-build compatibility ################ def split(self, sep=None, maxsplit=-1): - assert sep == '::' + assert sep == "::" return [self.channel, self.dist_name] if self.channel else [self.dist_name] def rsplit(self, sep=None, maxsplit=-1): - assert sep == '-' + assert sep == "-" assert maxsplit == 2 name = f"{self.channel}::{self.quad[0]}" if self.channel else self.quad[0] return name, self.quad[1], self.quad[2] @@ -317,9 +345,9 @@ def fn(self): def dist_str_to_quad(dist_str): dist_str = strip_extension(dist_str) - if '::' in dist_str: + if "::" in dist_str: channel_str, dist_str = dist_str.split("::", 1) else: channel_str = UNKNOWN_CHANNEL - name, version, build = dist_str.rsplit('-', 2) + name, version, build = dist_str.rsplit("-", 2) return name, version, build, channel_str diff --git a/conda/models/enums.py b/conda/models/enums.py index 991079926aa..e8d23ea9355 100644 --- a/conda/models/enums.py +++ b/conda/models/enums.py @@ -1,10 +1,8 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from platform import machine import sys - from enum import Enum +from platform import machine from ..auxlib.decorators import classproperty from ..auxlib.ish import dals @@ -13,24 +11,24 @@ class Arch(Enum): - x86 = 'x86' - x86_64 = 'x86_64' + x86 = "x86" + x86_64 = "x86_64" # arm64 is for macOS and Windows - arm64 = 'arm64' - armv6l = 'armv6l' - armv7l = 'armv7l' + arm64 = "arm64" + armv6l = "armv6l" + armv7l = "armv7l" # aarch64 is for Linux only - aarch64 = 'aarch64' - ppc64 = 'ppc64' - ppc64le = 'ppc64le' - riscv64 = 'riscv64' - s390x = 's390x' - z = 'z' + aarch64 = "aarch64" + ppc64 = "ppc64" + ppc64le = "ppc64le" + riscv64 = "riscv64" + s390x = "s390x" + z = "z" @classmethod def from_sys(cls): - if sys.platform == 'zos': - return cls['z'] + if sys.platform == "zos": + return cls["z"] return cls[machine()] def __json__(self): @@ -38,21 +36,21 @@ def __json__(self): class Platform(Enum): - linux = 'linux' - win = 'win32' - openbsd = 'openbsd5' - osx = 'darwin' - zos = 'zos' + linux = "linux" + win = "win32" + openbsd = "openbsd5" + osx = "darwin" + zos = "zos" @classmethod def from_sys(cls): p = sys.platform - if p.startswith('linux'): + if p.startswith("linux"): # Changed in version 2.7.3: Since lots of code check for sys.platform == 'linux2', # and there is no essential change between Linux 2.x and 3.x, sys.platform is always # set to 'linux2', even on Linux 3.x. 
In Python 3.3 and later, the value will always # be set to 'linux' - p = 'linux' + p = "linux" return cls(p) def __json__(self): @@ -60,8 +58,8 @@ def __json__(self): class FileMode(Enum): - text = 'text' - binary = 'binary' + text = "text" + binary = "binary" def __str__(self): return "%s" % self.value @@ -90,16 +88,19 @@ class PathType(Enum): Refers to if the file in question is hard linked or soft linked. Originally designed to be used in paths.json """ - hardlink = 'hardlink' - softlink = 'softlink' - directory = 'directory' + + hardlink = "hardlink" + softlink = "softlink" + directory = "directory" # these additional types should not be included by conda-build in packages - linked_package_record = 'linked_package_record' # a package's .json file in conda-meta - pyc_file = 'pyc_file' - unix_python_entry_point = 'unix_python_entry_point' - windows_python_entry_point_script = 'windows_python_entry_point_script' - windows_python_entry_point_exe = 'windows_python_entry_point_exe' + linked_package_record = ( + "linked_package_record" # a package's .json file in conda-meta + ) + pyc_file = "pyc_file" + unix_python_entry_point = "unix_python_entry_point" + windows_python_entry_point_script = "windows_python_entry_point_script" + windows_python_entry_point_exe = "windows_python_entry_point_exe" @classproperty def basic_types(self): @@ -113,9 +114,9 @@ def __json__(self): class LeasedPathType(Enum): - application_entry_point = 'application_entry_point' - application_entry_point_windows_exe = 'application_entry_point_windows_exe' - application_softlink = 'application_softlink' + application_entry_point = "application_entry_point" + application_entry_point_windows_exe = "application_entry_point_windows_exe" + application_softlink = "application_softlink" def __str__(self): return self.name @@ -125,14 +126,14 @@ def __json__(self): class PackageType(Enum): - NOARCH_GENERIC = 'noarch_generic' - NOARCH_PYTHON = 'noarch_python' - VIRTUAL_PRIVATE_ENV = 'virtual_private_env' - VIRTUAL_PYTHON_WHEEL = 'virtual_python_wheel' # manageable - VIRTUAL_PYTHON_EGG_MANAGEABLE = 'virtual_python_egg_manageable' - VIRTUAL_PYTHON_EGG_UNMANAGEABLE = 'virtual_python_egg_unmanageable' - VIRTUAL_PYTHON_EGG_LINK = 'virtual_python_egg_link' # unmanageable - VIRTUAL_SYSTEM = 'virtual_system' # virtual packages representing system attributes + NOARCH_GENERIC = "noarch_generic" + NOARCH_PYTHON = "noarch_python" + VIRTUAL_PRIVATE_ENV = "virtual_private_env" + VIRTUAL_PYTHON_WHEEL = "virtual_python_wheel" # manageable + VIRTUAL_PYTHON_EGG_MANAGEABLE = "virtual_python_egg_manageable" + VIRTUAL_PYTHON_EGG_UNMANAGEABLE = "virtual_python_egg_unmanageable" + VIRTUAL_PYTHON_EGG_LINK = "virtual_python_egg_link" # unmanageable + VIRTUAL_SYSTEM = "virtual_system" # virtual packages representing system attributes @staticmethod def conda_package_types(): @@ -152,32 +153,37 @@ def unmanageable_package_types(): class NoarchType(Enum): - generic = 'generic' - python = 'python' + generic = "generic" + python = "python" @staticmethod def coerce(val): # what a mess if isinstance(val, NoarchType): return val - valtype = getattr(val, 'type', None) - if isinstance(valtype, NoarchType): # see issue #8311 + valtype = getattr(val, "type", None) + if isinstance(valtype, NoarchType): # see issue #8311 return valtype if isinstance(val, bool): val = NoarchType.generic if val else None if isinstance(val, str): val = val.lower() - if val == 'python': + if val == "python": val = NoarchType.python - elif val == 'generic': + elif val == "generic": val = 
NoarchType.generic else: try: val = NoarchType.generic if boolify(val) else None except TypeCoercionError: - raise CondaUpgradeError(dals(""" + raise CondaUpgradeError( + dals( + """ The noarch type for this package is set to '%s'. The current version of conda is too old to install this package. Please update conda. - """ % val)) + """ + % val + ) + ) return val diff --git a/conda/models/leased_path_entry.py b/conda/models/leased_path_entry.py index b07e117545e..8b49397f543 100644 --- a/conda/models/leased_path_entry.py +++ b/conda/models/leased_path_entry.py @@ -1,22 +1,21 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger -from .enums import LeasedPathType from ..auxlib.entity import Entity, EnumField, StringField +from .enums import LeasedPathType log = getLogger(__name__) class LeasedPathEntry(Entity): """ - _path: short path for the leased path, using forward slashes - target_path: the full path to the executable in the private env - target_prefix: the full path to the private environment - leased_path: the full path for the lease in the root prefix - package_name: the package holding the lease - leased_path_type: application_entry_point + _path: short path for the leased path, using forward slashes + target_path: the full path to the executable in the private env + target_prefix: the full path to the private environment + leased_path: the full path for the lease in the root prefix + package_name: the package holding the lease + leased_path_type: application_entry_point """ diff --git a/conda/models/match_spec.py b/conda/models/match_spec.py index 9eb01242320..43296d1a3e2 100644 --- a/conda/models/match_spec.py +++ b/conda/models/match_spec.py @@ -1,68 +1,67 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import re +import warnings from abc import ABCMeta, abstractmethod, abstractproperty - from collections.abc import Mapping from functools import reduce from itertools import chain from logging import getLogger from operator import attrgetter from os.path import basename -import warnings -import re from conda.common.iterators import groupby_to_dict as groupby -from .channel import Channel -from .version import BuildNumberMatch, VersionSpec from ..auxlib.collection import frozendict from ..auxlib.decorators import memoizedproperty from ..base.constants import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 +from ..base.context import context from ..common.compat import isiterable from ..common.io import dashlist -from ..common.path import expand, url_to_path, strip_pkg_extension, is_package_file +from ..common.path import expand, is_package_file, strip_pkg_extension, url_to_path from ..common.url import is_url, path_to_url, unquote from ..exceptions import CondaValueError, InvalidMatchSpec -from ..base.context import context +from .channel import Channel +from .version import BuildNumberMatch, VersionSpec log = getLogger(__name__) class MatchSpecType(type): - def __call__(cls, spec_arg=None, **kwargs): if spec_arg: if isinstance(spec_arg, MatchSpec) and not kwargs: return spec_arg elif isinstance(spec_arg, MatchSpec): new_kwargs = dict(spec_arg._match_components) - new_kwargs.setdefault('optional', spec_arg.optional) - new_kwargs.setdefault('target', spec_arg.target) - new_kwargs['_original_spec_str'] = spec_arg.original_spec_str + new_kwargs.setdefault("optional", spec_arg.optional) + new_kwargs.setdefault("target", spec_arg.target) + new_kwargs["_original_spec_str"] = 
spec_arg.original_spec_str new_kwargs.update(**kwargs) return super().__call__(**new_kwargs) elif isinstance(spec_arg, str): parsed = _parse_spec_str(spec_arg) if kwargs: parsed = dict(parsed, **kwargs) - if set(kwargs) - {'optional', 'target'}: + if set(kwargs) - {"optional", "target"}: # if kwargs has anything but optional and target, # strip out _original_spec_str from parsed - parsed.pop('_original_spec_str', None) + parsed.pop("_original_spec_str", None) return super().__call__(**parsed) elif isinstance(spec_arg, Mapping): parsed = dict(spec_arg, **kwargs) return super().__call__(**parsed) - elif hasattr(spec_arg, 'to_match_spec'): + elif hasattr(spec_arg, "to_match_spec"): spec = spec_arg.to_match_spec() if kwargs: return MatchSpec(spec, **kwargs) else: return spec else: - raise CondaValueError("Invalid MatchSpec:\n spec_arg=%s\n kwargs=%s" - % (spec_arg, kwargs)) + raise CondaValueError( + "Invalid MatchSpec:\n spec_arg=%s\n kwargs=%s" + % (spec_arg, kwargs) + ) else: return super().__call__(**kwargs) @@ -150,19 +149,19 @@ class MatchSpec(metaclass=MatchSpecType): """ FIELD_NAMES = ( - 'channel', - 'subdir', - 'name', - 'version', - 'build', - 'build_number', - 'track_features', - 'features', - 'url', - 'md5', - 'license', - 'license_family', - 'fn', + "channel", + "subdir", + "name", + "version", + "build", + "build_number", + "track_features", + "features", + "url", + "md5", + "license", + "license_family", + "fn", ) FIELD_NAMES_SET = frozenset(FIELD_NAMES) _MATCHER_CACHE = {} @@ -170,35 +169,37 @@ class MatchSpec(metaclass=MatchSpecType): def __init__(self, optional=False, target=None, **kwargs): self._optional = optional self._target = target - self._original_spec_str = kwargs.pop('_original_spec_str', None) + self._original_spec_str = kwargs.pop("_original_spec_str", None) self._match_components = self._build_components(**kwargs) @classmethod def from_dist_str(cls, dist_str): parts = {} - if dist_str[-len(CONDA_PACKAGE_EXTENSION_V2):] == CONDA_PACKAGE_EXTENSION_V2: - dist_str = dist_str[:-len(CONDA_PACKAGE_EXTENSION_V2)] - elif dist_str[-len(CONDA_PACKAGE_EXTENSION_V1):] == CONDA_PACKAGE_EXTENSION_V1: - dist_str = dist_str[:-len(CONDA_PACKAGE_EXTENSION_V1)] - if '::' in dist_str: + if dist_str[-len(CONDA_PACKAGE_EXTENSION_V2) :] == CONDA_PACKAGE_EXTENSION_V2: + dist_str = dist_str[: -len(CONDA_PACKAGE_EXTENSION_V2)] + elif dist_str[-len(CONDA_PACKAGE_EXTENSION_V1) :] == CONDA_PACKAGE_EXTENSION_V1: + dist_str = dist_str[: -len(CONDA_PACKAGE_EXTENSION_V1)] + if "::" in dist_str: channel_subdir_str, dist_str = dist_str.split("::", 1) - if '/' in channel_subdir_str: - channel_str, subdir = channel_subdir_str.rsplit('/', 1) + if "/" in channel_subdir_str: + channel_str, subdir = channel_subdir_str.rsplit("/", 1) if subdir not in context.known_subdirs: channel_str = channel_subdir_str subdir = None - parts['channel'] = channel_str + parts["channel"] = channel_str if subdir: - parts['subdir'] = subdir + parts["subdir"] = subdir else: - parts['channel'] = channel_subdir_str - - name, version, build = dist_str.rsplit('-', 2) - parts.update({ - 'name': name, - 'version': version, - 'build': build, - }) + parts["channel"] = channel_subdir_str + + name, version, build = dist_str.rsplit("-", 2) + parts.update( + { + "name": name, + "version": version, + "build": build, + } + ) return cls(**parts) def get_exact_value(self, field_name): @@ -215,9 +216,11 @@ def get(self, field_name, default=None): @property def is_name_only_spec(self): - return (len(self._match_components) == 1 - and 'name' 
in self._match_components - and self.name != '*') + return ( + len(self._match_components) == 1 + and "name" in self._match_components + and self.name != "*" + ) def dist_str(self): return self.__str__() @@ -242,6 +245,7 @@ def match(self, rec): if isinstance(rec, dict): # TODO: consider AttrDict instead of PackageRecord from .records import PackageRecord + rec = PackageRecord.from_objects(rec) for field_name, v in self._match_components.items(): if not self._match_individual(rec, field_name, v): @@ -256,7 +260,10 @@ def _match_individual(self, record, field_name, match_component): return match_component == val def _is_simple(self): - return len(self._match_components) == 1 and self.get_exact_value('name') is not None + return ( + len(self._match_components) == 1 + and self.get_exact_value("name") is not None + ) def _is_single(self): return len(self._match_components) == 1 @@ -264,19 +271,19 @@ def _is_single(self): def _to_filename_do_not_use(self): # WARNING: this is potentially unreliable and use should probably be limited # returns None if a filename can't be constructed - fn_field = self.get_exact_value('fn') + fn_field = self.get_exact_value("fn") if fn_field: return fn_field - vals = tuple(self.get_exact_value(x) for x in ('name', 'version', 'build')) + vals = tuple(self.get_exact_value(x) for x in ("name", "version", "build")) if not any(x is None for x in vals): - return ('%s-%s-%s' % vals) + CONDA_PACKAGE_EXTENSION_V1 + return ("%s-%s-%s" % vals) + CONDA_PACKAGE_EXTENSION_V1 else: return None def __repr__(self): builder = [f'{self.__class__.__name__}("{self}"'] if self.target: - builder.append(", target=\"%s\"" % self.target) + builder.append(', target="%s"' % self.target) if self.optional: builder.append(", optional=True") builder.append(")") @@ -286,24 +293,24 @@ def __str__(self): builder = [] brackets = [] - channel_matcher = self._match_components.get('channel') + channel_matcher = self._match_components.get("channel") if channel_matcher and channel_matcher.exact_value: builder.append(str(channel_matcher)) elif channel_matcher and not channel_matcher.matches_all: brackets.append("channel=%s" % str(channel_matcher)) - subdir_matcher = self._match_components.get('subdir') + subdir_matcher = self._match_components.get("subdir") if subdir_matcher: if channel_matcher and channel_matcher.exact_value: - builder.append('/%s' % subdir_matcher) + builder.append("/%s" % subdir_matcher) else: brackets.append("subdir=%s" % subdir_matcher) - name_matcher = self._match_components.get('name', '*') - builder.append(('::%s' if builder else '%s') % name_matcher) + name_matcher = self._match_components.get("name", "*") + builder.append(("::%s" if builder else "%s") % name_matcher) - version = self._match_components.get('version') - build = self._match_components.get('build') + version = self._match_components.get("version") + build = self._match_components.get("build") version_exact = False if version: version = str(version) @@ -327,45 +334,45 @@ def __str__(self): if build: build = str(build) - if any(s in build for s in '><$^|,'): + if any(s in build for s in "><$^|,"): brackets.append("build='%s'" % build) - elif '*' in build: + elif "*" in build: brackets.append("build=%s" % build) elif version_exact: - builder.append('=' + build) + builder.append("=" + build) else: brackets.append("build=%s" % build) - _skip = {'channel', 'subdir', 'name', 'version', 'build'} - if 'url' in self._match_components and 'fn' in self._match_components: - _skip.add('fn') + _skip = {"channel", "subdir", "name", 
"version", "build"} + if "url" in self._match_components and "fn" in self._match_components: + _skip.add("fn") for key in self.FIELD_NAMES: if key not in _skip and key in self._match_components: - if key == 'url' and channel_matcher: + if key == "url" and channel_matcher: # skip url in canonical str if channel already included continue value = str(self._match_components[key]) - if any(s in value for s in ', ='): + if any(s in value for s in ", ="): brackets.append(f"{key}='{value}'") else: brackets.append(f"{key}={value}") if brackets: - builder.append('[%s]' % ','.join(brackets)) + builder.append("[%s]" % ",".join(brackets)) - return ''.join(builder) + return "".join(builder) def __json__(self): return self.__str__() def conda_build_form(self): builder = [] - name = self.get_exact_value('name') + name = self.get_exact_value("name") assert name builder.append(name) - build = self.get_raw_value('build') - version = self.get_raw_value('version') + build = self.get_raw_value("build") + version = self.get_raw_value("version") if build: assert version @@ -373,7 +380,7 @@ def conda_build_form(self): elif version: builder.append(version) - return ' '.join(builder) + return " ".join(builder) def __eq__(self, other): if isinstance(other, MatchSpec): @@ -394,14 +401,15 @@ def __contains__(self, field): def _build_components(self, **kwargs): not_fields = set(kwargs) - MatchSpec.FIELD_NAMES_SET if not_fields: - raise InvalidMatchSpec(self._original_spec_str, - 'Cannot match on field(s): %s' % not_fields) + raise InvalidMatchSpec( + self._original_spec_str, "Cannot match on field(s): %s" % not_fields + ) _make_component = MatchSpec._make_component return frozendict(_make_component(key, value) for key, value in kwargs.items()) @staticmethod def _make_component(field_name, value): - if hasattr(value, 'match'): + if hasattr(value, "match"): matcher = value return field_name, matcher @@ -419,7 +427,7 @@ def _make_component(field_name, value): @property def name(self): - return self.get_exact_value('name') or '*' + return self.get_exact_value("name") or "*" # # Remaining methods are for back compatibility with conda-build. Do not remove @@ -429,12 +437,12 @@ def name(self): def strictness(self): # With the old MatchSpec, strictness==3 if name, version, and # build were all specified. 
- s = sum(f in self._match_components for f in ('name', 'version', 'build')) + s = sum(f in self._match_components for f in ("name", "version", "build")) if s < len(self._match_components): return 3 - elif not self.get_exact_value('name') or 'build' in self._match_components: + elif not self.get_exact_value("name") or "build" in self._match_components: return 3 - elif 'version' in self._match_components: + elif "version" in self._match_components: return 2 else: return 1 @@ -447,11 +455,11 @@ def spec(self): def version(self): # in the old MatchSpec object, version was a VersionSpec, not a str # so we'll keep that API here - return self._match_components.get('version') + return self._match_components.get("version") @property def fn(self): - val = self.get_raw_value('fn') or self.get_raw_value('url') + val = self.get_raw_value("fn") or self.get_raw_value("url") if val: val = basename(val) assert val @@ -460,22 +468,25 @@ def fn(self): @classmethod def merge(cls, match_specs, union=False): match_specs = sorted(tuple(cls(s) for s in match_specs if s), key=str) - name_groups = groupby(attrgetter('name'), match_specs) - unmergeable = name_groups.pop('*', []) + name_groups.pop(None, []) + name_groups = groupby(attrgetter("name"), match_specs) + unmergeable = name_groups.pop("*", []) + name_groups.pop(None, []) merged_specs = [] mergeable_groups = tuple( chain.from_iterable( - groupby(lambda s: s.optional, group).values() for group in name_groups.values() + groupby(lambda s: s.optional, group).values() + for group in name_groups.values() ) ) for group in mergeable_groups: - target_groups = groupby(attrgetter('target'), group) + target_groups = groupby(attrgetter("target"), group) target_groups.pop(None, None) if len(target_groups) > 1: raise ValueError("Incompatible MatchSpec merge:%s" % dashlist(group)) merged_specs.append( - reduce(lambda x, y: x._merge(y, union), group) if len(group) > 1 else group[0] + reduce(lambda x, y: x._merge(y, union), group) + if len(group) > 1 + else group[0] ) return (*merged_specs, *unmergeable) @@ -484,7 +495,6 @@ def union(cls, match_specs): return cls.merge(match_specs, union=True) def _merge(self, other, union=False): - if self.optional != other.optional or self.target != other.target: raise ValueError(f"Incompatible MatchSpec merge:\n - {self}\n - {other}") @@ -508,7 +518,9 @@ def _merge(self, other, union=False): else: final = this_component.merge(that_component) final_components[component_name] = final - return self.__class__(optional=self.optional, target=self.target, **final_components) + return self.__class__( + optional=self.optional, target=self.target, **final_components + ) def _parse_version_plus_build(v_plus_b): @@ -531,13 +543,15 @@ def _parse_version_plus_build(v_plus_b): >>> _parse_version_plus_build("* *") ('*', '*') """ - parts = re.search(r'([*]|(?:.+?)[^><!,|?*~= ])(?:(?: +| *= *)([^-=,|<>~]+?))?$', v_plus_b) + parts = re.search( + r"([*]|(?:.+?)[^><!,|?*~= ])(?:(?: +| *= *)([^-=,|<>~]+?))?$", v_plus_b + ) if parts: version, build = parts.groups() build = build and build.strip() else: version, build = v_plus_b, None - return version and version.replace(' ', ''), build + return version and version.replace(" ", ""), build def _parse_legacy_dist(dist_str): @@ -549,7 +563,7 @@ def _parse_legacy_dist(dist_str): ('_license', '1.1', 'py27_1') """ dist_str, _ = strip_pkg_extension(dist_str) - name, version, build = dist_str.rsplit('-', 2) + name, version, build = dist_str.rsplit("-", 2) return name, version, build @@ -572,21 +586,21 @@ def _parse_spec_str(spec_str): original_spec_str =
spec_str # pre-step for ugly backward compat - if spec_str.endswith('@'): + if spec_str.endswith("@"): feature_name = spec_str[:-1] return { - 'name': '*', - 'track_features': (feature_name,), + "name": "*", + "track_features": (feature_name,), } # Step 1. strip '#' comment - if '#' in spec_str: - ndx = spec_str.index('#') + if "#" in spec_str: + ndx = spec_str.index("#") spec_str, _ = spec_str[:ndx], spec_str[ndx:] spec_str.strip() # Step 1.b strip ' if ' anticipating future compatibility issues - spec_split = spec_str.split(' if ', 1) + spec_split = spec_str.split(" if ", 1) if len(spec_split) > 1: log.debug("Ignoring conditional in spec %s", spec_str) spec_str = spec_split[0] @@ -601,59 +615,65 @@ def _parse_spec_str(spec_str): if channel.subdir: name, version, build = _parse_legacy_dist(channel.package_filename) result = { - 'channel': channel.canonical_name, - 'subdir': channel.subdir, - 'name': name, - 'version': version, - 'build': build, - 'fn': channel.package_filename, - 'url': spec_str, + "channel": channel.canonical_name, + "subdir": channel.subdir, + "name": name, + "version": version, + "build": build, + "fn": channel.package_filename, + "url": spec_str, } else: # url is not a channel - if spec_str.startswith('file://'): + if spec_str.startswith("file://"): # We must undo percent-encoding when generating fn. path_or_url = url_to_path(spec_str) else: path_or_url = spec_str return { - 'name': '*', - 'fn': basename(path_or_url), - 'url': spec_str, + "name": "*", + "fn": basename(path_or_url), + "url": spec_str, } return result # Step 3. strip off brackets portion brackets = {} - m3 = re.match(r'.*(?:(\[.*\]))', spec_str) + m3 = re.match(r".*(?:(\[.*\]))", spec_str) if m3: brackets_str = m3.groups()[0] - spec_str = spec_str.replace(brackets_str, '') + spec_str = spec_str.replace(brackets_str, "") brackets_str = brackets_str[1:-1] - m3b = re.finditer(r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', brackets_str) + m3b = re.finditer( + r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', brackets_str + ) for match in m3b: key, _, value, _ = match.groups() if not key or not value: - raise InvalidMatchSpec(original_spec_str, "key-value mismatch in brackets") + raise InvalidMatchSpec( + original_spec_str, "key-value mismatch in brackets" + ) brackets[key] = value # Step 4. strip off parens portion - m4 = re.match(r'.*(?:(\(.*\)))', spec_str) + m4 = re.match(r".*(?:(\(.*\)))", spec_str) parens = {} if m4: parens_str = m4.groups()[0] - spec_str = spec_str.replace(parens_str, '') + spec_str = spec_str.replace(parens_str, "") parens_str = parens_str[1:-1] - m4b = re.finditer(r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', parens_str) + m4b = re.finditer( + r'([a-zA-Z0-9_-]+?)=(["\']?)([^\'"]*?)(\2)(?:[, ]|$)', parens_str + ) for match in m4b: key, _, value, _ = match.groups() parens[key] = value - if 'optional' in parens_str: - parens['optional'] = True + if "optional" in parens_str: + parens["optional"] = True # Step 5. 
strip off '::' channel and namespace - m5 = spec_str.rsplit(':', 2) + m5 = spec_str.rsplit(":", 2) m5_len = len(m5) if m5_len == 3: channel_str, namespace, spec_str = m5 @@ -666,21 +686,23 @@ def _parse_spec_str(spec_str): else: raise NotImplementedError() channel, subdir = _parse_channel(channel_str) - if 'channel' in brackets: - b_channel, b_subdir = _parse_channel(brackets.pop('channel')) + if "channel" in brackets: + b_channel, b_subdir = _parse_channel(brackets.pop("channel")) if b_channel: channel = b_channel if b_subdir: subdir = b_subdir - if 'subdir' in brackets: - subdir = brackets.pop('subdir') + if "subdir" in brackets: + subdir = brackets.pop("subdir") # Step 6. strip off package name from remaining version + build - m3 = re.match(r'([^ =<>!~]+)?([><!=~ ].+)?', spec_str) + m3 = re.match(r"([^ =<>!~]+)?([><!=~ ].+)?", spec_str) if m3: name, spec_str = m3.groups() else: raise InvalidMatchSpec(original_spec_str, "invalid MatchSpec") # Step 7. otherwise sort out version + build spec_str = spec_str and spec_str.strip() # if name.count('-') >= 2: # name, version, build = _parse_legacy_dist(name) if spec_str: - if '[' in spec_str: - raise InvalidMatchSpec(original_spec_str, "multiple brackets sections not allowed") + if "[" in spec_str: + raise InvalidMatchSpec( + original_spec_str, "multiple brackets sections not allowed" + ) version, build = _parse_version_plus_build(spec_str) @@ -712,13 +736,13 @@ def _parse_spec_str(spec_str): # Otherwise, # translate version '=1.2.3' to '1.2.3*' # is it a simple version starting with '='? i.e. '=1.2.3' - elif version[0] == '=': + elif version[0] == "=": test_str = version[1:] - if version[:2] == '==' and build is None: + if version[:2] == "==" and build is None: version = version[2:] elif not any(c in test_str for c in "=,|"): - if build is None and test_str[-1] != '*': - version = test_str + '*' + if build is None and test_str[-1] != "*": + version = test_str + "*" else: version = test_str else: @@ -729,16 +753,16 @@ def _parse_spec_str(spec_str): components["name"] = name or "*" if channel is not None: - components['channel'] = channel + components["channel"] = channel if subdir is not None: - components['subdir'] = subdir + components["subdir"] = subdir if namespace is not None: # components['namespace'] = namespace pass if version is not None: - components['version'] = version + components["version"] = version if build is not None: - components['build'] = build + components["build"] = build # anything in brackets will now strictly override key as set in other area of spec str # EXCEPT FOR: name @@ -753,7 +777,7 @@ def _parse_spec_str(spec_str): ) del brackets["name"] components.update(brackets) - components['_original_spec_str'] = original_spec_str + components["_original_spec_str"] = original_spec_str _PARSE_CACHE[original_spec_str] = components return components @@ -782,17 +806,18 @@ def exact_value(self): def merge(self, other): if self.raw_value != other.raw_value: - raise ValueError("Incompatible component merge:\n - %r\n - %r" - % (self.raw_value, other.raw_value)) + raise ValueError( + "Incompatible component merge:\n - %r\n - %r" + % (self.raw_value, other.raw_value) + ) return self.raw_value def union(self, other): options = {self.raw_value, other.raw_value} - return '|'.join(options) + return "|".join(options) class _StrMatchMixin: - def __str__(self): return self._raw_value @@ -811,7 +836,7 @@ def exact_value(self): class ExactStrMatch(_StrMatchMixin, MatchInterface): - __slots__ = '_raw_value', + __slots__ = ("_raw_value",) def __init__(self, value): super().__init__(value) @@ -825,7 +850,6 @@ def match(self, other): class ExactLowerStrMatch(ExactStrMatch): - def __init__(self, value): super().__init__(value.lower()) @@ -838,20 +862,22 @@ def match(self, other): class GlobStrMatch(_StrMatchMixin,
MatchInterface): - __slots__ = '_raw_value', '_re_match' + __slots__ = "_raw_value", "_re_match" def __init__(self, value): super().__init__(value) self._re_match = None try: - if value.startswith('^') and value.endswith('$'): + if value.startswith("^") and value.endswith("$"): self._re_match = re.compile(value).match - elif '*' in value: - value = re.escape(value).replace('\\*', r'.*') - self._re_match = re.compile(r'^(?:%s)$' % value).match + elif "*" in value: + value = re.escape(value).replace("\\*", r".*") + self._re_match = re.compile(r"^(?:%s)$" % value).match except re.error as e: - raise InvalidMatchSpec(value, f"Contains an invalid regular expression. '{e}'") + raise InvalidMatchSpec( + value, f"Contains an invalid regular expression. '{e}'" + ) def match(self, other): try: @@ -870,24 +896,23 @@ def exact_value(self): @property def matches_all(self): - return self._raw_value == '*' + return self._raw_value == "*" class GlobLowerStrMatch(GlobStrMatch): - def __init__(self, value): super().__init__(value.lower()) class SplitStrMatch(MatchInterface): - __slots__ = '_raw_value', + __slots__ = ("_raw_value",) def __init__(self, value): super().__init__(self._convert(value)) def _convert(self, value): try: - return frozenset(value.replace(' ', ',').split(',')) + return frozenset(value.replace(" ", ",").split(",")) except AttributeError: if isiterable(value): return frozenset(value) @@ -901,13 +926,13 @@ def match(self, other): def __repr__(self): if self._raw_value: - return "{%s}" % ', '.join("'%s'" % s for s in sorted(self._raw_value)) + return "{%s}" % ", ".join("'%s'" % s for s in sorted(self._raw_value)) else: - return 'set()' + return "set()" def __str__(self): # this space delimiting makes me nauseous - return ' '.join(sorted(self._raw_value)) + return " ".join(sorted(self._raw_value)) def __eq__(self, other): return isinstance(other, self.__class__) and self._raw_value == other._raw_value @@ -921,7 +946,7 @@ def exact_value(self): class FeatureMatch(MatchInterface): - __slots__ = '_raw_value', + __slots__ = ("_raw_value",) def __init__(self, value): super().__init__(self._convert(value)) @@ -930,9 +955,11 @@ def _convert(self, value): if not value: return frozenset() elif isinstance(value, str): - return frozenset(f for f in ( - ff.strip() for ff in value.replace(' ', ',').split(',') - ) if f) + return frozenset( + f + for f in (ff.strip() for ff in value.replace(" ", ",").split(",")) + if f + ) else: return frozenset(f for f in (ff.strip() for ff in value) if f) @@ -941,10 +968,10 @@ def match(self, other): return self._raw_value == other def __repr__(self): - return "[%s]" % ', '.join("'%s'" % k for k in sorted(self._raw_value)) + return "[%s]" % ", ".join("'%s'" % k for k in sorted(self._raw_value)) def __str__(self): - return ' '.join(sorted(self._raw_value)) + return " ".join(sorted(self._raw_value)) def __eq__(self, other): return isinstance(other, self.__class__) and self._raw_value == other._raw_value @@ -958,20 +985,23 @@ def exact_value(self): class ChannelMatch(GlobStrMatch): - def __init__(self, value): self._re_match = None try: if isinstance(value, str): - if value.startswith('^') and value.endswith('$'): + if value.startswith("^") and value.endswith("$"): self._re_match = re.compile(value).match - elif '*' in value: - self._re_match = re.compile(r'^(?:%s)$' % value.replace('*', r'.*')).match + elif "*" in value: + self._re_match = re.compile( + r"^(?:%s)$" % value.replace("*", r".*") + ).match else: value = Channel(value) except re.error as e: - raise 
InvalidMatchSpec(value, f"Contains an invalid regular expression. '{e}'") + raise InvalidMatchSpec( + value, f"Contains an invalid regular expression. '{e}'" + ) super(GlobStrMatch, self).__init__(value) @@ -999,7 +1029,6 @@ def __repr__(self): class CaseInsensitiveStrMatch(GlobLowerStrMatch): - def match(self, other): try: _other_val = other._raw_value @@ -1014,13 +1043,13 @@ def match(self, other): _implementors = { - 'channel': ChannelMatch, - 'name': GlobLowerStrMatch, - 'version': VersionSpec, - 'build': GlobStrMatch, - 'build_number': BuildNumberMatch, - 'track_features': FeatureMatch, - 'features': FeatureMatch, - 'license': CaseInsensitiveStrMatch, - 'license_family': CaseInsensitiveStrMatch, + "channel": ChannelMatch, + "name": GlobLowerStrMatch, + "version": VersionSpec, + "build": GlobStrMatch, + "build_number": BuildNumberMatch, + "track_features": FeatureMatch, + "features": FeatureMatch, + "license": CaseInsensitiveStrMatch, + "license_family": CaseInsensitiveStrMatch, } diff --git a/conda/models/package_info.py b/conda/models/package_info.py index 2ab0c76cca4..781e2caeb0c 100644 --- a/conda/models/package_info.py +++ b/conda/models/package_info.py @@ -1,11 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger -from .channel import Channel -from .enums import NoarchType -from .records import PackageRecord, PathsData from ..auxlib.entity import ( ComposableField, Entity, @@ -15,6 +11,9 @@ ListField, StringField, ) +from .channel import Channel +from .enums import NoarchType +from .records import PackageRecord, PathsData log = getLogger(__name__) @@ -26,8 +25,9 @@ def box(self, instance, instance_type, val): class Noarch(Entity): type = NoarchField(NoarchType) - entry_points = ListField(str, required=False, nullable=True, default=None, - default_in_dump=False) + entry_points = ListField( + str, required=False, nullable=True, default=None, default_in_dump=False + ) class PreferredEnv(Entity): @@ -40,12 +40,12 @@ class PackageMetadata(Entity): # from info/package_metadata.json package_metadata_version = IntegerField() noarch = ComposableField(Noarch, required=False, nullable=True) - preferred_env = ComposableField(PreferredEnv, required=False, nullable=True, default=None, - default_in_dump=False) + preferred_env = ComposableField( + PreferredEnv, required=False, nullable=True, default=None, default_in_dump=False + ) class PackageInfo(ImmutableEntity): - # attributes external to the package tarball extracted_package_dir = StringField() package_tarball_full_path = StringField() diff --git a/conda/models/prefix_graph.py b/conda/models/prefix_graph.py index 828bdd62fa2..d3b246ad15d 100644 --- a/conda/models/prefix_graph.py +++ b/conda/models/prefix_graph.py @@ -8,11 +8,11 @@ except ImportError: # pragma: no cover from .._vendor.boltons.setutils import IndexedSet -from .enums import NoarchType -from .match_spec import MatchSpec from ..base.context import context from ..common.compat import on_win from ..exceptions import CyclicalDependencyError +from .enums import NoarchType +from .match_spec import MatchSpec log = getLogger(__name__) @@ -39,8 +39,7 @@ def __init__(self, records, specs=()): for node in records: parent_match_specs = tuple(MatchSpec(d) for d in node.depends) parent_nodes = { - rec for rec in records - if any(m.match(rec) for m in parent_match_specs) + rec for rec in records if any(m.match(rec) for m in parent_match_specs) } graph[node] = parent_nodes matching_specs = IndexedSet(s for s in specs if 
s.match(node)) @@ -64,7 +63,7 @@ def remove_spec(self, spec): # If the spec was a track_features spec, then we need to also remove every # package with a feature that matches the track_feature. - for feature_name in spec.get_raw_value('track_features') or (): + for feature_name in spec.get_raw_value("track_features") or (): feature_spec = MatchSpec(features=feature_name) node_matches.update(node for node in self.graph if feature_spec.match(node)) @@ -72,10 +71,7 @@ def remove_spec(self, spec): for node in node_matches: remove_these.add(node) remove_these.update(self.all_descendants(node)) - remove_these = tuple(filter( - lambda node: node in remove_these, - self.graph - )) + remove_these = tuple(filter(lambda node: node in remove_these, self.graph)) for node in remove_these: self._remove_node(node) self._toposort() @@ -91,13 +87,17 @@ def remove_youngest_descendant_nodes_with_specs(self): """ graph = self.graph spec_matches = self.spec_matches - inverted_graph = {node: {key for key in graph if node in graph[key]} for node in graph} + inverted_graph = { + node: {key for key in graph if node in graph[key]} for node in graph + } youngest_nodes_with_specs = tuple( node for node, children in inverted_graph.items() if not children and node in spec_matches ) - removed_nodes = tuple(filter(lambda node: node in youngest_nodes_with_specs, self.graph)) + removed_nodes = tuple( + filter(lambda node: node in youngest_nodes_with_specs, self.graph) + ) for node in removed_nodes: self._remove_node(node) self._toposort() @@ -120,7 +120,9 @@ def prune(self): removed_nodes = set() while True: - inverted_graph = {node: {key for key in graph if node in graph[key]} for node in graph} + inverted_graph = { + node: {key for key in graph if node in graph[key]} for node in graph + } prunable_nodes = tuple( node for node, children in inverted_graph.items() @@ -132,10 +134,9 @@ def prune(self): removed_nodes.add(node) self._remove_node(node) - removed_nodes = tuple(filter( - lambda node: node in removed_nodes, - original_order - )) + removed_nodes = tuple( + filter(lambda node: node in removed_nodes, original_order) + ) self._toposort() return removed_nodes @@ -144,7 +145,9 @@ def get_node_by_name(self, name): def all_descendants(self, node): graph = self.graph - inverted_graph = {node: {key for key in graph if node in graph[key]} for node in graph} + inverted_graph = { + node: {key for key in graph if node in graph[key]} for node in graph + } nodes = [node] nodes_seen = set() @@ -155,12 +158,7 @@ def all_descendants(self, node): nodes_seen.add(child_node) nodes.append(child_node) q += 1 - return tuple( - filter( - lambda node: node in nodes_seen, - graph - ) - ) + return tuple(filter(lambda node: node in nodes_seen, graph)) def all_ancestors(self, node): graph = self.graph @@ -173,18 +171,13 @@ def all_ancestors(self, node): nodes_seen.add(parent_node) nodes.append(parent_node) q += 1 - return tuple( - filter( - lambda node: node in nodes_seen, - graph - ) - ) + return tuple(filter(lambda node: node in nodes_seen, graph)) def _remove_node(self, node): - """ Removes this node and all edges referencing it. 
""" + """Removes this node and all edges referencing it.""" graph = self.graph if node not in graph: - raise KeyError('node %s does not exist' % node) + raise KeyError("node %s does not exist" % node) graph.pop(node) self.spec_matches.pop(node, None) @@ -209,10 +202,12 @@ def _toposort_raise_on_cycles(cls, graph): return while True: - no_parent_nodes = IndexedSet(sorted( - (node for node, parents in graph.items() if len(parents) == 0), - key=lambda x: x.name - )) + no_parent_nodes = IndexedSet( + sorted( + (node for node, parents in graph.items() if len(parents) == 0), + key=lambda x: x.name, + ) + ) if not no_parent_nodes: break @@ -233,11 +228,17 @@ def _topo_sort_handle_cycles(cls, graph): v.discard(k) # disconnected nodes go first - nodes_that_are_parents = {node for parents in graph.values() for node in parents} + nodes_that_are_parents = { + node for parents in graph.values() for node in parents + } nodes_without_parents = (node for node in graph if not graph[node]) disconnected_nodes = sorted( - (node for node in nodes_without_parents if node not in nodes_that_are_parents), - key=lambda x: x.name + ( + node + for node in nodes_without_parents + if node not in nodes_that_are_parents + ), + key=lambda x: x.name, ) yield from disconnected_nodes @@ -250,7 +251,7 @@ def _topo_sort_handle_cycles(cls, graph): except CyclicalDependencyError as e: # TODO: Turn this into a warning, but without being too annoying with # multiple messages. See https://github.com/conda/conda/issues/4067 - log.debug('%r', e) + log.debug("%r", e) yield cls._toposort_pop_key(graph) @@ -286,15 +287,17 @@ def _toposort_prepare_graph(graph): if node.name == "python": parents = graph[node] for parent in tuple(parents): - if parent.name == 'pip': + if parent.name == "pip": parents.remove(parent) if on_win: # 2. Special case code for menuinst. # Always link/unlink menuinst first/last on windows in case a subsequent # package tries to import it to create/remove a shortcut. - menuinst_node = next((node for node in graph if node.name == 'menuinst'), None) - python_node = next((node for node in graph if node.name == 'python'), None) + menuinst_node = next( + (node for node in graph if node.name == "menuinst"), None + ) + python_node = next((node for node in graph if node.name == "python"), None) if menuinst_node: # add menuinst as a parent if python is a parent and the node # isn't a parent of menuinst @@ -309,15 +312,19 @@ def _toposort_prepare_graph(graph): # that have entry points use conda's own conda.exe python entry point binary. If # conda is going to be updated during an operation, the unlink / link order matters. # See issue #6057. 
- conda_node = next((node for node in graph if node.name == 'conda'), None) + conda_node = next((node for node in graph if node.name == "conda"), None) if conda_node: # add conda as a parent if python is a parent and node isn't a parent of conda conda_parents = graph[conda_node] for node, parents in graph.items(): - if (hasattr(node, 'noarch') and node.noarch == NoarchType.python - and node not in conda_parents): + if ( + hasattr(node, "noarch") + and node.noarch == NoarchType.python + and node not in conda_parents + ): parents.add(conda_node) + # def dot_repr(self, title=None): # pragma: no cover # # graphviz DOT graph description language # diff --git a/conda/models/records.py b/conda/models/records.py index 8cc369cd8f8..76596d6e0bf 100644 --- a/conda/models/records.py +++ b/conda/models/records.py @@ -18,9 +18,6 @@ except ImportError: # pragma: no cover from .._vendor.boltons.timeutils import dt_to_timestamp, isoparse -from .channel import Channel -from .enums import FileMode, LinkType, NoarchType, PackageType, PathType, Platform -from .match_spec import MatchSpec from ..auxlib.entity import ( BooleanField, ComposableField, @@ -35,15 +32,18 @@ from ..base.context import context from ..common.compat import isiterable from ..exceptions import PathNotFoundError +from .channel import Channel +from .enums import FileMode, LinkType, NoarchType, PackageType, PathType, Platform +from .match_spec import MatchSpec class LinkTypeField(EnumField): def box(self, instance, instance_type, val): if isinstance(val, str): - val = val.replace('-', '').replace('_', '').lower() - if val == 'hard': + val = val.replace("-", "").replace("_", "").lower() + if val == "hard": val = LinkType.hardlink - elif val == 'soft': + elif val == "soft": val = LinkType.softlink return super().box(instance, instance_type, val) @@ -54,7 +54,6 @@ def box(self, instance, instance_type, val): class TimestampField(NumberField): - def __init__(self): super().__init__(default=0, required=False, default_in_dump=False) @@ -63,7 +62,9 @@ def _make_seconds(val): if val: val = val if val > 253402300799: # 9999-12-31 - val /= 1000 # convert milliseconds to seconds; see conda/conda-build#1988 + val /= ( + 1000 # convert milliseconds to seconds; see conda/conda-build#1988 + ) return val @staticmethod @@ -97,29 +98,27 @@ class Link(DictSafeMixin, Entity): type = LinkTypeField(LinkType, required=False) -EMPTY_LINK = Link(source='') +EMPTY_LINK = Link(source="") class _FeaturesField(ListField): - def __init__(self, **kwargs): super().__init__(str, **kwargs) def box(self, instance, instance_type, val): if isinstance(val, str): - val = val.replace(' ', ',').split(',') + val = val.replace(" ", ",").split(",") val = tuple(f for f in (ff.strip() for ff in val) if f) return super().box(instance, instance_type, val) def dump(self, instance, instance_type, val): if isiterable(val): - return ' '.join(val) + return " ".join(val) else: return val or () # default value is (), and default_in_dump=False class ChannelField(ComposableField): - def __init__(self, aliases=()): super().__init__(Channel, required=False, aliases=aliases) @@ -139,7 +138,6 @@ def __get__(self, instance, instance_type): class SubdirField(StringField): - def __init__(self): super().__init__(required=False) @@ -159,7 +157,7 @@ def __get__(self, instance, instance_type): except AttributeError: platform, arch = None, None if platform and not arch: - return self.unbox(instance, instance_type, 'noarch') + return self.unbox(instance, instance_type, "noarch") elif platform: if "x86" in arch: 
arch = "64" if "64" in arch else "32" @@ -169,7 +167,6 @@ def __get__(self, instance, instance_type): class FilenameField(StringField): - def __init__(self, aliases=()): super().__init__(required=False, aliases=aliases) @@ -189,10 +186,13 @@ def __get__(self, instance, instance_type): class PackageTypeField(EnumField): - def __init__(self): super().__init__( - PackageType, required=False, nullable=True, default=None, default_in_dump=False + PackageType, + required=False, + nullable=True, + default=None, + default_in_dump=False, ) def __get__(self, instance, instance_type): @@ -212,10 +212,13 @@ def __get__(self, instance, instance_type): class PathData(Entity): _path = StringField() - prefix_placeholder = StringField(required=False, nullable=True, default=None, - default_in_dump=False) + prefix_placeholder = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) file_mode = EnumField(FileMode, required=False, nullable=True) - no_link = BooleanField(required=False, nullable=True, default=None, default_in_dump=False) + no_link = BooleanField( + required=False, nullable=True, default=None, default_in_dump=False + ) path_type = EnumField(PathType) @property @@ -242,22 +245,29 @@ class PathsData(Entity): class PackageRecord(DictSafeMixin, Entity): name = StringField() version = StringField() - build = StringField(aliases=('build_string',)) + build = StringField(aliases=("build_string",)) build_number = IntegerField() # the canonical code abbreviation for PackageRef is `pref` # fields required to uniquely identifying a package - channel = ChannelField(aliases=('schannel',)) + channel = ChannelField(aliases=("schannel",)) subdir = SubdirField() - fn = FilenameField(aliases=('filename',)) + fn = FilenameField(aliases=("filename",)) - md5 = StringField(default=None, required=False, nullable=True, default_in_dump=False) - legacy_bz2_md5 = StringField(default=None, required=False, nullable=True, - default_in_dump=False) + md5 = StringField( + default=None, required=False, nullable=True, default_in_dump=False + ) + legacy_bz2_md5 = StringField( + default=None, required=False, nullable=True, default_in_dump=False + ) legacy_bz2_size = IntegerField(required=False, nullable=True, default_in_dump=False) - url = StringField(default=None, required=False, nullable=True, default_in_dump=False) - sha256 = StringField(default=None, required=False, nullable=True, default_in_dump=False) + url = StringField( + default=None, required=False, nullable=True, default_in_dump=False + ) + sha256 = StringField( + default=None, required=False, nullable=True, default_in_dump=False + ) metadata_signature_status = StringField( default="", required=False, nullable=True, default_in_dump=False @@ -273,8 +283,12 @@ def _pkey(self): return self.__pkey except AttributeError: __pkey = self.__pkey = [ - self.channel.canonical_name, self.subdir, self.name, - self.version, self.build_number, self.build + self.channel.canonical_name, + self.subdir, + self.name, + self.version, + self.build_number, + self.build, ] # NOTE: fn is included to distinguish between .conda and .tar.bz2 packages if context.separate_format_cache: @@ -298,7 +312,8 @@ def dist_str(self): ("/" + self.subdir) if self.subdir else "", self.name, self.version, - self.build) + self.build, + ) def dist_fields_dump(self): return { @@ -321,13 +336,19 @@ def dist_fields_dump(self): track_features = _FeaturesField(required=False, default=(), default_in_dump=False) features = _FeaturesField(required=False, default=(), default_in_dump=False) - 
noarch = NoarchField(NoarchType, required=False, nullable=True, default=None, - default_in_dump=False) # TODO: rename to package_type - preferred_env = StringField(required=False, nullable=True, default=None, default_in_dump=False) + noarch = NoarchField( + NoarchType, required=False, nullable=True, default=None, default_in_dump=False + ) # TODO: rename to package_type + preferred_env = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) - license = StringField(required=False, nullable=True, default=None, default_in_dump=False) - license_family = StringField(required=False, nullable=True, default=None, - default_in_dump=False) + license = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) + license_family = StringField( + required=False, nullable=True, default=None, default_in_dump=False + ) package_type = PackageTypeField() @property @@ -339,8 +360,9 @@ def is_unmanageable(self): @property def combined_depends(self): from .match_spec import MatchSpec + result = {ms.name: ms for ms in MatchSpec.merge(self.depends)} - for spec in (self.constrains or ()): + for spec in self.constrains or (): ms = MatchSpec(spec) result[ms.name] = MatchSpec(ms, optional=(ms.name not in result)) return tuple(result.values()) @@ -359,7 +381,11 @@ def combined_depends(self): def __str__(self): return "{}/{}::{}=={}={}".format( - self.channel.canonical_name, self.subdir, self.name, self.version, self.build + self.channel.canonical_name, + self.subdir, + self.name, + self.version, + self.build, ) def to_match_spec(self): @@ -392,7 +418,6 @@ def record_id(self): class Md5Field(StringField): - def __init__(self): super().__init__(required=False, nullable=True) @@ -407,7 +432,6 @@ def __get__(self, instance, instance_type): class PackageCacheRecord(PackageRecord): - package_tarball_full_path = StringField() extracted_package_dir = StringField() @@ -416,39 +440,43 @@ class PackageCacheRecord(PackageRecord): @property def is_fetched(self): from ..gateways.disk.read import isfile + return isfile(self.package_tarball_full_path) @property def is_extracted(self): from ..gateways.disk.read import isdir, isfile + epd = self.extracted_package_dir - return isdir(epd) and isfile(join(epd, 'info', 'index.json')) + return isdir(epd) and isfile(join(epd, "info", "index.json")) @property def tarball_basename(self): return basename(self.package_tarball_full_path) def _calculate_md5sum(self): - memoized_md5 = getattr(self, '_memoized_md5', None) + memoized_md5 = getattr(self, "_memoized_md5", None) if memoized_md5: return memoized_md5 from os.path import isfile + if isfile(self.package_tarball_full_path): from ..gateways.disk.read import compute_sum md5sum = compute_sum(self.package_tarball_full_path, "md5") - setattr(self, '_memoized_md5', md5sum) + setattr(self, "_memoized_md5", md5sum) return md5sum class PrefixRecord(PackageRecord): - package_tarball_full_path = StringField(required=False) extracted_package_dir = StringField(required=False) files = ListField(str, default=(), required=False) - paths_data = ComposableField(PathsData, required=False, nullable=True, default_in_dump=False) + paths_data = ComposableField( + PathsData, required=False, nullable=True, default_in_dump=False + ) link = ComposableField(Link, required=False) # app = ComposableField(App, required=False) diff --git a/conda/models/version.py b/conda/models/version.py index 266be9d3fe0..70be5d19eb2 100644 --- a/conda/models/version.py +++ b/conda/models/version.py @@ -2,10 +2,10 @@ # 
SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from logging import getLogger import operator as op import re from itertools import zip_longest +from logging import getLogger from ..exceptions import InvalidVersionSpec @@ -25,13 +25,12 @@ def ver_eval(vtest, spec): return VersionSpec(spec).match(vtest) -version_check_re = re.compile(r'^[\*\.\+!_0-9a-z]+$') -version_split_re = re.compile('([0-9]+|[*]+|[^0-9*]+)') +version_check_re = re.compile(r"^[\*\.\+!_0-9a-z]+$") +version_split_re = re.compile("([0-9]+|[*]+|[^0-9*]+)") version_cache = {} class SingleStrArgCachingType(type): - def __call__(cls, arg): if isinstance(arg, cls): return arg @@ -155,19 +154,20 @@ class VersionOrder(metaclass=SingleStrArgCachingType): 1.0.1_ < 1.0.1a => True # ensure correct ordering for openssl """ + _cache_ = {} def __init__(self, vstr): # version comparison is case-insensitive version = vstr.strip().rstrip().lower() # basic validity checks - if version == '': + if version == "": raise InvalidVersionSpec(vstr, "empty version string") invalid = not version_check_re.match(version) - if invalid and '-' in version and '_' not in version: + if invalid and "-" in version and "_" not in version: # Allow for dashes as long as there are no underscores # as well, by converting the former to the latter. - version = version.replace('-', '_') + version = version.replace("-", "_") invalid = not version_check_re.match(version) if invalid: raise InvalidVersionSpec(vstr, "invalid character(s)") @@ -178,10 +178,10 @@ def __init__(self, vstr): self.fillvalue = 0 # find epoch - version = version.split('!') + version = version.split("!") if len(version) == 1: # epoch not given => set it to '0' - epoch = ['0'] + epoch = ["0"] elif len(version) == 2: # epoch given, must be an integer if not version[0].isdigit(): @@ -191,14 +191,14 @@ def __init__(self, vstr): raise InvalidVersionSpec(vstr, "duplicated epoch separator '!'") # find local version string - version = version[-1].split('+') + version = version[-1].split("+") if len(version) == 1: # no local version self.local = [] # Case 2: We have a local version component in version[1] elif len(version) == 2: # local version given - self.local = version[1].replace('_', '.').split('.') + self.local = version[1].replace("_", ".").split(".") else: raise InvalidVersionSpec(vstr, "duplicated local version separator '+'") @@ -206,17 +206,19 @@ def __init__(self, vstr): # e.g. "+", "1.2", "+a", "+1". # This is an error because specifying only a local version is invalid. # version[0] is empty because vstr.split("+") returns something like ['', '1.2'] - if version[0] == '': - raise InvalidVersionSpec(vstr, "Missing version before local version separator '+'") + if version[0] == "": + raise InvalidVersionSpec( + vstr, "Missing version before local version separator '+'" + ) if version[0][-1] == "_": # If the last character of version is "-" or "_", don't split that out # individually. 
Implements the instructions for openssl-like versions # > You can work-around this problem by appending a dash to plain version numbers - split_version = version[0][:-1].replace('_', '.').split('.') + split_version = version[0][:-1].replace("_", ".").split(".") split_version[-1] += "_" else: - split_version = version[0].replace('_', '.').split('.') + split_version = version[0].replace("_", ".").split(".") self.version = epoch + split_version # split components into runs of numerals and non-numerals, @@ -229,13 +231,13 @@ def __init__(self, vstr): for j in range(len(c)): if c[j].isdigit(): c[j] = int(c[j]) - elif c[j] == 'post': + elif c[j] == "post": # ensure number < 'post' == infinity - c[j] = float('inf') - elif c[j] == 'dev': + c[j] = float("inf") + elif c[j] == "dev": # ensure '*' < 'DEV' < '_' < 'a' < number # by upper-casing (all other strings are lower case) - c[j] = 'DEV' + c[j] = "DEV" if v[k][0].isdigit(): v[k] = c else: @@ -257,7 +259,9 @@ def _eq(self, t1, t2): return True def __eq__(self, other): - return self._eq(self.version, other.version) and self._eq(self.local, other.local) + return self._eq(self.version, other.version) and self._eq( + self.local, other.local + ) def startswith(self, other): # Tests if the version lists match up to the last element in "other". @@ -315,9 +319,11 @@ def __ge__(self, other): # each token slurps up leading whitespace, which we strip out. -VSPEC_TOKENS = (r'\s*\^[^$]*[$]|' # regexes - r'\s*[()|,]|' # parentheses, logical and, logical or - r'[^()|,]+') # everything else +VSPEC_TOKENS = ( + r"\s*\^[^$]*[$]|" # regexes + r"\s*[()|,]|" # parentheses, logical and, logical or + r"[^()|,]+" +) # everything else def treeify(spec_str): @@ -339,7 +345,7 @@ def treeify(spec_str): # Converts a VersionSpec expression string into a tuple-based # expression tree. 
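# (Reviewer illustration, hedged, not part of the upstream change: "," binds
# tighter than "|", so under this grammar one would expect, e.g.,
# treeify("1.2.3,>4.5.6") to yield (',', '1.2.3', '>4.5.6') and
# treeify("1.5|(1.6|1.7),1.8") to yield
# ('|', '1.5', (',', ('|', '1.6', '1.7'), '1.8')).)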
assert isinstance(spec_str, str) - tokens = re.findall(VSPEC_TOKENS, '(%s)' % spec_str) + tokens = re.findall(VSPEC_TOKENS, "(%s)" % spec_str) output = [] stack = [] @@ -360,27 +366,29 @@ def apply_ops(cstop): r = r[1:] if r[0] == c else (r,) left = output.pop() left = left[1:] if left[0] == c else (left,) - output.append((c,)+left+r) + output.append((c,) + left + r) for item in tokens: item = item.strip() - if item == '|': - apply_ops('(') - stack.append('|') - elif item == ',': - apply_ops('|(') - stack.append(',') - elif item == '(': - stack.append('(') - elif item == ')': - apply_ops('(') - if not stack or stack[-1] != '(': + if item == "|": + apply_ops("(") + stack.append("|") + elif item == ",": + apply_ops("|(") + stack.append(",") + elif item == "(": + stack.append("(") + elif item == ")": + apply_ops("(") + if not stack or stack[-1] != "(": raise InvalidVersionSpec(spec_str, "expression must start with '('") stack.pop() else: output.append(item) if stack: - raise InvalidVersionSpec(spec_str, "unable to convert to expression tree: %s" % stack) + raise InvalidVersionSpec( + spec_str, "unable to convert to expression tree: %s" % stack + ) if not output: raise InvalidVersionSpec(spec_str, "unable to determine version from spec") return output[0] @@ -401,43 +409,47 @@ def untreeify(spec, _inand=False, depth=0): '1.5|((1.6|1.7),1.8,1.9)|2.0|2.1' """ if isinstance(spec, tuple): - if spec[0] == '|': - res = '|'.join(map(lambda x: untreeify(x, depth=depth + 1), spec[1:])) + if spec[0] == "|": + res = "|".join(map(lambda x: untreeify(x, depth=depth + 1), spec[1:])) if _inand or depth > 0: - res = '(%s)' % res + res = "(%s)" % res else: - res = ','.join(map(lambda x: untreeify(x, _inand=True, depth=depth + 1), spec[1:])) + res = ",".join( + map(lambda x: untreeify(x, _inand=True, depth=depth + 1), spec[1:]) + ) if depth > 0: - res = '(%s)' % res + res = "(%s)" % res return res return spec def compatible_release_operator(x, y): - return op.__ge__(x, y) and x.startswith(VersionOrder(".".join(str(y).split(".")[:-1]))) + return op.__ge__(x, y) and x.startswith( + VersionOrder(".".join(str(y).split(".")[:-1])) + ) # This RE matches the operators '==', '!=', '<=', '>=', '<', '>' # followed by a version string. It rejects expressions like # '<= 1.2' (space after operator), '<>1.2' (unknown operator), # and '<=!1.2' (nonsensical operator). 
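# (Reviewer illustration, hedged, not part of the upstream change: one would
# expect version_relation_re.match(">=1.8.2").groups() to yield
# ('>=', '1.8.2'), while both version_relation_re.match("<= 1.2") and
# version_relation_re.match("<>1.2") come back as None.)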
-version_relation_re = re.compile(r'^(=|==|!=|<=|>=|<|>|~=)(?![=<>!~])(\S+)$') -regex_split_re = re.compile(r'.*[()|,^$]') +version_relation_re = re.compile(r"^(=|==|!=|<=|>=|<|>|~=)(?![=<>!~])(\S+)$") +regex_split_re = re.compile(r".*[()|,^$]") OPERATOR_MAP = { - '==': op.__eq__, - '!=': op.__ne__, - '<=': op.__le__, - '>=': op.__ge__, - '<': op.__lt__, - '>': op.__gt__, - '=': lambda x, y: x.startswith(y), + "==": op.__eq__, + "!=": op.__ne__, + "<=": op.__le__, + ">=": op.__ge__, + "<": op.__lt__, + ">": op.__gt__, + "=": lambda x, y: x.startswith(y), "!=startswith": lambda x, y: not x.startswith(y), "~=": compatible_release_operator, } -OPERATOR_START = frozenset(('=', '<', '>', '!', '~')) +OPERATOR_START = frozenset(("=", "<", ">", "!", "~")) -class BaseSpec: +class BaseSpec: def __init__(self, spec_str, matcher, is_exact): self.spec_str = spec_str self._is_exact = is_exact @@ -507,13 +519,12 @@ def __init__(self, vspec): super().__init__(vspec_str, matcher, is_exact) def get_matcher(self, vspec): - if isinstance(vspec, str) and regex_split_re.match(vspec): vspec = treeify(vspec) if isinstance(vspec, tuple): vspec_tree = vspec - _matcher = self.any_match if vspec_tree[0] == '|' else self.all_match + _matcher = self.any_match if vspec_tree[0] == "|" else self.all_match tup = tuple(VersionSpec(s) for s in vspec_tree[1:]) vspec_str = untreeify((vspec_tree[0],) + tuple(t.spec for t in tup)) self.tup = tup @@ -522,10 +533,11 @@ def get_matcher(self, vspec): return vspec_str, matcher, is_exact vspec_str = str(vspec).strip() - if vspec_str[0] == '^' or vspec_str[-1] == '$': - if vspec_str[0] != '^' or vspec_str[-1] != '$': - raise InvalidVersionSpec(vspec_str, "regex specs must start " - "with '^' and end with '$'") + if vspec_str[0] == "^" or vspec_str[-1] == "$": + if vspec_str[0] != "^" or vspec_str[-1] != "$": + raise InvalidVersionSpec( + vspec_str, "regex specs must start " "with '^' and end with '$'" + ) self.regex = re.compile(vspec_str) matcher = self.regex_match is_exact = False @@ -534,7 +546,7 @@ def get_matcher(self, vspec): if m is None: raise InvalidVersionSpec(vspec_str, "invalid operator") operator_str, vo_str = m.groups() - if vo_str[-2:] == '.*': + if vo_str[-2:] == ".*": if operator_str in ("=", ">="): vo_str = vo_str[:-2] elif operator_str == "!=": @@ -543,31 +555,36 @@ def get_matcher(self, vspec): elif operator_str == "~=": raise InvalidVersionSpec(vspec_str, "invalid operator with '.*'") else: - log.warning("Using .* with relational operator is superfluous and deprecated " - "and will be removed in a future version of conda. Your spec was " - "{}, but conda is ignoring the .* and treating it as {}" - .format(vo_str, vo_str[:-2])) + log.warning( + "Using .* with relational operator is superfluous and deprecated " + "and will be removed in a future version of conda. 
Your spec was " + "{}, but conda is ignoring the .* and treating it as {}".format( + vo_str, vo_str[:-2] + ) + ) vo_str = vo_str[:-2] try: self.operator_func = OPERATOR_MAP[operator_str] except KeyError: - raise InvalidVersionSpec(vspec_str, "invalid operator: %s" % operator_str) + raise InvalidVersionSpec( + vspec_str, "invalid operator: %s" % operator_str + ) self.matcher_vo = VersionOrder(vo_str) matcher = self.operator_match is_exact = operator_str == "==" - elif vspec_str == '*': + elif vspec_str == "*": matcher = self.always_true_match is_exact = False - elif '*' in vspec_str.rstrip('*'): - rx = vspec_str.replace('.', r'\.').replace('+', r'\+').replace('*', r'.*') - rx = r'^(?:%s)$' % rx + elif "*" in vspec_str.rstrip("*"): + rx = vspec_str.replace(".", r"\.").replace("+", r"\+").replace("*", r".*") + rx = r"^(?:%s)$" % rx self.regex = re.compile(rx) matcher = self.regex_match is_exact = False - elif vspec_str[-1] == '*': - if vspec_str[-2:] != '.*': - vspec_str = vspec_str[:-1] + '.*' + elif vspec_str[-1] == "*": + if vspec_str[-2:] != ".*": + vspec_str = vspec_str[:-1] + ".*" # if vspec_str[-1] in OPERATOR_START: # m = version_relation_re.match(vspec_str) @@ -579,12 +596,12 @@ def get_matcher(self, vspec): # else: # pass - vo_str = vspec_str.rstrip('*').rstrip('.') + vo_str = vspec_str.rstrip("*").rstrip(".") self.operator_func = VersionOrder.startswith self.matcher_vo = VersionOrder(vo_str) matcher = self.operator_match is_exact = False - elif '@' not in vspec_str: + elif "@" not in vspec_str: self.operator_func = OPERATOR_MAP["=="] self.matcher_vo = VersionOrder(vspec_str) matcher = self.operator_match @@ -596,14 +613,14 @@ def get_matcher(self, vspec): def merge(self, other): assert isinstance(other, self.__class__) - return self.__class__(','.join(sorted((self.raw_value, other.raw_value)))) + return self.__class__(",".join(sorted((self.raw_value, other.raw_value)))) def union(self, other): assert isinstance(other, self.__class__) options = {self.raw_value, other.raw_value} # important: we only return a string here because the parens get gobbled otherwise # this info is for visual display only, not for feeding into actual matches - return '|'.join(sorted(options)) + return "|".join(sorted(options)) # TODO: someday switch out these class names for consistency @@ -628,10 +645,10 @@ def get_matcher(self, vspec): return vspec, matcher, is_exact vspec_str = str(vspec).strip() - if vspec_str == '*': + if vspec_str == "*": matcher = self.always_true_match is_exact = False - elif vspec_str.startswith(('=', '<', '>', '!')): + elif vspec_str.startswith(("=", "<", ">", "!")): m = version_relation_re.match(vspec_str) if m is None: raise InvalidVersionSpec(vspec_str, "invalid operator") @@ -639,15 +656,18 @@ def get_matcher(self, vspec): try: self.operator_func = OPERATOR_MAP[operator_str] except KeyError: - raise InvalidVersionSpec(vspec_str, "invalid operator: %s" % operator_str) + raise InvalidVersionSpec( + vspec_str, "invalid operator: %s" % operator_str + ) self.matcher_vo = VersionOrder(vo_str) matcher = self.operator_match is_exact = operator_str == "==" - elif vspec_str[0] == '^' or vspec_str[-1] == '$': - if vspec_str[0] != '^' or vspec_str[-1] != '$': - raise InvalidVersionSpec(vspec_str, "regex specs must start " - "with '^' and end with '$'") + elif vspec_str[0] == "^" or vspec_str[-1] == "$": + if vspec_str[0] != "^" or vspec_str[-1] != "$": + raise InvalidVersionSpec( + vspec_str, "regex specs must start " "with '^' and end with '$'" + ) self.regex = re.compile(vspec_str) 
matcher = self.regex_match @@ -662,13 +682,15 @@ def get_matcher(self, vspec): def merge(self, other): if self.raw_value != other.raw_value: - raise ValueError("Incompatible component merge:\n - %r\n - %r" - % (self.raw_value, other.raw_value)) + raise ValueError( + "Incompatible component merge:\n - %r\n - %r" + % (self.raw_value, other.raw_value) + ) return self.raw_value def union(self, other): options = {self.raw_value, other.raw_value} - return '|'.join(options) + return "|".join(options) @property def exact_value(self) -> int | None: diff --git a/conda/notices/cache.py b/conda/notices/cache.py index 0132a9d8397..a298463b448 100644 --- a/conda/notices/cache.py +++ b/conda/notices/cache.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - """ Handles all caching logic including: - Retrieving from cache @@ -16,10 +15,9 @@ from typing import Optional, Sequence, Set from .._vendor.appdirs import user_cache_dir -from ..base.constants import APP_NAME, NOTICES_CACHE_SUBDIR, NOTICES_CACHE_FN +from ..base.constants import APP_NAME, NOTICES_CACHE_FN, NOTICES_CACHE_SUBDIR from ..utils import ensure_dir_exists - -from .types import ChannelNoticeResponse, ChannelNotice +from .types import ChannelNotice, ChannelNoticeResponse logger = logging.getLogger(__name__) @@ -42,7 +40,9 @@ def wrapper(url: str, name: str): return wrapper -def is_notice_response_cache_expired(channel_notice_response: ChannelNoticeResponse) -> bool: +def is_notice_response_cache_expired( + channel_notice_response: ChannelNoticeResponse, +) -> bool: """ This checks the contents of the cache response to see if it is expired. @@ -60,7 +60,8 @@ def is_channel_notice_expired(expired_at: Optional[datetime]) -> bool: return expired_at < now return any( - is_channel_notice_expired(chn.expired_at) for chn in channel_notice_response.notices + is_channel_notice_expired(chn.expired_at) + for chn in channel_notice_response.notices ) @@ -107,7 +108,9 @@ def write_notice_response_to_cache( """ Writes our notice data to our local cache location """ - cache_key = ChannelNoticeResponse.get_cache_key(channel_notice_response.url, cache_dir) + cache_key = ChannelNoticeResponse.get_cache_key( + channel_notice_response.url, cache_dir + ) with open(cache_key, "w") as fp: json.dump(channel_notice_response.json_data, fp) diff --git a/conda/notices/core.py b/conda/notices/core.py index 740d3fd75fe..e4f068a8dd4 100644 --- a/conda/notices/core.py +++ b/conda/notices/core.py @@ -7,13 +7,10 @@ from functools import wraps from typing import Sequence -from ..base.context import context, Context -from ..base.constants import NOTICES_FN, NOTICES_DECORATOR_DISPLAY_INTERVAL +from ..base.constants import NOTICES_DECORATOR_DISPLAY_INTERVAL, NOTICES_FN +from ..base.context import Context, context from ..models.channel import Channel, MultiChannel, get_channel_objs - -from . import cache -from . import views -from . import fetch +from . import cache, fetch, views from .types import ChannelNotice, ChannelNoticeResponse, ChannelNoticeResultSet # Used below in type hints @@ -39,7 +36,9 @@ def retrieve_notices( silent: Whether to use a spinner when fetching and caching notices. 
""" channel_name_urls = get_channel_name_and_urls(get_channel_objs(context)) - channel_notice_responses = fetch.get_notice_responses(channel_name_urls, silent=silent) + channel_notice_responses = fetch.get_notice_responses( + channel_name_urls, silent=silent + ) channel_notices = flatten_notice_responses(channel_notice_responses) total_number_channel_notices = len(channel_notices) @@ -52,10 +51,14 @@ def retrieve_notices( viewed_notices = None viewed_channel_notices = 0 if not always_show_viewed: - viewed_notices = cache.get_viewed_channel_notice_ids(cache_file, channel_notices) + viewed_notices = cache.get_viewed_channel_notice_ids( + cache_file, channel_notices + ) viewed_channel_notices = len(viewed_notices) - channel_notices = filter_notices(channel_notices, limit=limit, exclude=viewed_notices) + channel_notices = filter_notices( + channel_notices, limit=limit, exclude=viewed_notices + ) return ChannelNoticeResultSet( channel_notices=channel_notices, @@ -95,6 +98,7 @@ def notices(func): Args: func: Function to be decorated """ + @wraps(func) def wrapper(*args, **kwargs): if is_channel_notices_enabled(context): diff --git a/conda/notices/fetch.py b/conda/notices/fetch.py index cebe8d64bb3..d89875422f7 100644 --- a/conda/notices/fetch.py +++ b/conda/notices/fetch.py @@ -1,15 +1,13 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import logging from concurrent.futures import ThreadPoolExecutor -from typing import Sequence, Tuple, Optional +from typing import Optional, Sequence, Tuple import requests from ..common.io import Spinner from ..gateways.connection.session import CondaSession - from .cache import cached_response from .types import ChannelNoticeResponse @@ -17,7 +15,9 @@ def get_notice_responses( - url_and_names: Sequence[Tuple[str, str]], silent: bool = False, max_workers: int = 10 + url_and_names: Sequence[Tuple[str, str]], + silent: bool = False, + max_workers: int = 10, ) -> Sequence[ChannelNoticeResponse]: """ Provided a list of channel notification url/name tuples, return a sequence of @@ -55,7 +55,9 @@ def get_channel_notice_response(url: str, name: str) -> Optional[ChannelNoticeRe """ session = CondaSession() try: - resp = session.get(url, allow_redirects=False, timeout=5) # timeout: connect, read + resp = session.get( + url, allow_redirects=False, timeout=5 + ) # timeout: connect, read except requests.exceptions.Timeout: logger.info(f"Request timed out for channel: {name} url: {url}") return diff --git a/conda/notices/types.py b/conda/notices/types.py index 3bfc1ac6885..2c48c930775 100644 --- a/conda/notices/types.py +++ b/conda/notices/types.py @@ -1,8 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from datetime import datetime import hashlib +from datetime import datetime from pathlib import Path from typing import NamedTuple, Optional, Sequence diff --git a/conda/notices/views.py b/conda/notices/views.py index 423dd1d925e..807942e02f4 100644 --- a/conda/notices/views.py +++ b/conda/notices/views.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - """ Handles all display/view logic """ @@ -22,7 +21,9 @@ def print_notices(channel_notices: Sequence[ChannelNotice]): if current_channel != channel_notice.channel_name: print() channel_header = "Channel" - channel_header += f' "{channel_notice.channel_name}" has the following notices:' + channel_header += ( + f' "{channel_notice.channel_name}" has the following notices:' + ) print(channel_header) current_channel = 
channel_notice.channel_name print_notice_message(channel_notice) diff --git a/conda/plan.py b/conda/plan.py index ebf3ae72641..3b8abb1b1d9 100644 --- a/conda/plan.py +++ b/conda/plan.py @@ -10,9 +10,9 @@ module. """ +import sys from collections import defaultdict from logging import getLogger -import sys try: from boltons.setutils import IndexedSet @@ -43,50 +43,57 @@ # TODO: Remove conda/plan.py. This module should be almost completely deprecated now. + def print_dists(dists_extras): fmt = " %-27s|%17s" - print(fmt % ('package', 'build')) - print(fmt % ('-' * 27, '-' * 17)) + print(fmt % ("package", "build")) + print(fmt % ("-" * 27, "-" * 17)) for prec, extra in dists_extras: - line = fmt % (prec.name + '-' + prec.version, prec.build) + line = fmt % (prec.name + "-" + prec.version, prec.build) if extra: line += extra print(line) -def display_actions(actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=()): +def display_actions( + actions, index, show_channel_urls=None, specs_to_remove=(), specs_to_add=() +): prefix = actions.get("PREFIX") - builder = ['', '## Package Plan ##\n'] + builder = ["", "## Package Plan ##\n"] if prefix: - builder.append(' environment location: %s' % prefix) - builder.append('') + builder.append(" environment location: %s" % prefix) + builder.append("") if specs_to_remove: - builder.append(' removed specs: %s' - % dashlist(sorted(str(s) for s in specs_to_remove), indent=4)) - builder.append('') + builder.append( + " removed specs: %s" + % dashlist(sorted(str(s) for s in specs_to_remove), indent=4) + ) + builder.append("") if specs_to_add: - builder.append(' added / updated specs: %s' - % dashlist(sorted(str(s) for s in specs_to_add), indent=4)) - builder.append('') - print('\n'.join(builder)) + builder.append( + " added / updated specs: %s" + % dashlist(sorted(str(s) for s in specs_to_add), indent=4) + ) + builder.append("") + print("\n".join(builder)) if show_channel_urls is None: show_channel_urls = context.show_channel_urls def channel_str(rec): - if rec.get('schannel'): - return rec['schannel'] - if rec.get('url'): - return Channel(rec['url']).canonical_name - if rec.get('channel'): - return Channel(rec['channel']).canonical_name + if rec.get("schannel"): + return rec["schannel"] + if rec.get("url"): + return Channel(rec["url"]).canonical_name + if rec.get("channel"): + return Channel(rec["channel"]).canonical_name return UNKNOWN_CHANNEL def channel_filt(s): if show_channel_urls is False: - return '' + return "" if show_channel_urls is None and s == DEFAULTS_CHANNEL_NAME: - return '' + return "" return s if actions.get(FETCH): @@ -95,40 +102,44 @@ def channel_filt(s): disp_lst = [] for prec in actions[FETCH]: assert isinstance(prec, PackageRecord) - extra = '%15s' % human_bytes(prec['size']) + extra = "%15s" % human_bytes(prec["size"]) schannel = channel_filt(prec.channel.canonical_name) if schannel: - extra += ' ' + schannel + extra += " " + schannel disp_lst.append((prec, extra)) print_dists(disp_lst) if index and len(actions[FETCH]) > 1: - num_bytes = sum(prec['size'] for prec in actions[FETCH]) - print(' ' * 4 + '-' * 60) + num_bytes = sum(prec["size"] for prec in actions[FETCH]) + print(" " * 4 + "-" * 60) print(" " * 43 + "Total: %14s" % human_bytes(num_bytes)) # package -> [oldver-oldbuild, newver-newbuild] - packages = defaultdict(lambda: list(('', ''))) - features = defaultdict(lambda: list(('', ''))) - channels = defaultdict(lambda: list(('', ''))) + packages = defaultdict(lambda: list(("", ""))) + features = defaultdict(lambda: 
list(("", ""))) + channels = defaultdict(lambda: list(("", ""))) records = defaultdict(lambda: list((None, None))) linktypes = {} for prec in actions.get(LINK, []): assert isinstance(prec, PackageRecord) - pkg = prec['name'] + pkg = prec["name"] channels[pkg][1] = channel_str(prec) - packages[pkg][1] = prec['version'] + '-' + prec['build'] + packages[pkg][1] = prec["version"] + "-" + prec["build"] records[pkg][1] = prec - linktypes[pkg] = LinkType.hardlink # TODO: this is a lie; may have to give this report after UnlinkLinkTransaction.verify() # NOQA - features[pkg][1] = ','.join(prec.get('features') or ()) + linktypes[ + pkg + ] = ( + LinkType.hardlink + ) # TODO: this is a lie; may have to give this report after UnlinkLinkTransaction.verify() # NOQA + features[pkg][1] = ",".join(prec.get("features") or ()) for prec in actions.get(UNLINK, []): assert isinstance(prec, PackageRecord) - pkg = prec['name'] + pkg = prec["name"] channels[pkg][0] = channel_str(prec) - packages[pkg][0] = prec['version'] + '-' + prec['build'] + packages[pkg][0] = prec["version"] + "-" + prec["build"] records[pkg][0] = prec - features[pkg][0] = ','.join(prec.get('features') or ()) + features[pkg][0] = ",".join(prec.get("features") or ()) new = {p for p in packages if not packages[p][0]} removed = {p for p in packages if not packages[p][1]} @@ -158,32 +169,32 @@ def channel_filt(s): # string with new-style string formatting. oldfmt[pkg] = f"{{pkg:<{maxpkg}}} {{vers[0]:<{maxoldver}}}" if maxoldchannels: - oldfmt[pkg] += ' {channels[0]:<%s}' % maxoldchannels + oldfmt[pkg] += " {channels[0]:<%s}" % maxoldchannels if features[pkg][0]: - oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures + oldfmt[pkg] += " [{features[0]:<%s}]" % maxoldfeatures lt = LinkType(linktypes.get(pkg, LinkType.hardlink)) - lt = '' if lt == LinkType.hardlink else (' (%s)' % lt) + lt = "" if lt == LinkType.hardlink else (" (%s)" % lt) if pkg in removed or pkg in new: oldfmt[pkg] += lt continue - newfmt[pkg] = '{vers[1]:<%s}' % maxnewver + newfmt[pkg] = "{vers[1]:<%s}" % maxnewver if maxnewchannels: - newfmt[pkg] += ' {channels[1]:<%s}' % maxnewchannels + newfmt[pkg] += " {channels[1]:<%s}" % maxnewchannels if features[pkg][1]: - newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures + newfmt[pkg] += " [{features[1]:<%s}]" % maxnewfeatures newfmt[pkg] += lt P0 = records[pkg][0] P1 = records[pkg][1] - pri0 = P0.get('priority') - pri1 = P1.get('priority') + pri0 = P0.get("priority") + pri1 = P1.get("priority") if pri0 is None or pri1 is None: pri0 = pri1 = 1 try: - if str(P1.version) == 'custom': - newver = str(P0.version) != 'custom' + if str(P1.version) == "custom": + newver = str(P0.version) != "custom" oldver = not newver else: # <= here means that unchanged packages will be put in updated @@ -196,7 +207,11 @@ def channel_filt(s): oldver = P0.version > P1.version oldbld = P0.build_number > P1.build_number newbld = P0.build_number < P1.build_number - if context.channel_priority and pri1 < pri0 and (oldver or not newver and not newbld): + if ( + context.channel_priority + and pri1 < pri0 + and (oldver or not newver and not newbld) + ): channeled.add(pkg) elif newver: updated.add(pkg) @@ -209,13 +224,14 @@ def channel_filt(s): else: downgraded.add(pkg) - arrow = ' --> ' - lead = ' ' * 4 + arrow = " --> " + lead = " " * 4 def format(s, pkg): chans = [channel_filt(c) for c in channels[pkg]] - return lead + s.format(pkg=pkg + ':', vers=packages[pkg], - channels=chans, features=features[pkg]) + return lead + s.format( + pkg=pkg + ":", 
vers=packages[pkg], channels=chans, features=features[pkg] + ) if new: print("\nThe following NEW packages will be INSTALLED:\n") @@ -234,7 +250,9 @@ def format(s, pkg): print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) if channeled: - print("\nThe following packages will be SUPERSEDED by a higher-priority channel:\n") + print( + "\nThe following packages will be SUPERSEDED by a higher-priority channel:\n" + ) for pkg in sorted(channeled): print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) @@ -245,7 +263,7 @@ def format(s, pkg): if empty and actions.get(SYMLINK_CONDA): print("\nThe following empty environments will be CREATED:\n") - print(actions['PREFIX']) + print(actions["PREFIX"]) print() @@ -268,7 +286,9 @@ def _get_best_prec_match(precs): assert precs for chn in context.channels: channel_matcher = ChannelMatch(chn) - prec_matches = tuple(prec for prec in precs if channel_matcher.match(prec.channel.name)) + prec_matches = tuple( + prec for prec in precs if channel_matcher.match(prec.channel.name) + ) if prec_matches: break else: @@ -287,7 +307,9 @@ def revert_actions(prefix, revision=-1, index=None): # TODO: This is wrong!!!!!!!!!! user_requested_specs = h.get_requested_specs_map().values() try: - target_state = {MatchSpec.from_dist_str(dist_str) for dist_str in h.get_state(revision)} + target_state = { + MatchSpec.from_dist_str(dist_str) for dist_str in h.get_state(revision) + } except IndexError: raise CondaIndexError("no such revision: %d" % revision) @@ -316,6 +338,7 @@ def revert_actions(prefix, revision=-1, index=None): # ---------------------------- Backwards compat for conda-build -------------------------- + @time_recorder("execute_actions") def execute_actions(actions, index, verbose=False): # pragma: no cover plan = _plan_from_actions(actions, index) @@ -325,16 +348,16 @@ def execute_actions(actions, index, verbose=False): # pragma: no cover def _plan_from_actions(actions, index): # pragma: no cover from .instructions import ACTION_CODES, PREFIX, PRINT, PROGRESS, PROGRESS_COMMANDS - if 'op_order' in actions and actions['op_order']: - op_order = actions['op_order'] + if "op_order" in actions and actions["op_order"]: + op_order = actions["op_order"] else: op_order = ACTION_CODES assert PREFIX in actions and actions[PREFIX] prefix = actions[PREFIX] - plan = [('PREFIX', '%s' % prefix)] + plan = [("PREFIX", "%s" % prefix)] - unlink_link_transaction = actions.get('UNLINKLINKTRANSACTION') + unlink_link_transaction = actions.get("UNLINKLINKTRANSACTION") if unlink_link_transaction: raise RuntimeError() # progressive_fetch_extract = actions.get('PROGRESSIVEFETCHEXTRACT') @@ -343,8 +366,8 @@ def _plan_from_actions(actions, index): # pragma: no cover # plan.append((UNLINKLINKTRANSACTION, unlink_link_transaction)) # return plan - axn = actions.get('ACTION') or None - specs = actions.get('SPECS', []) + axn = actions.get("ACTION") or None + specs = actions.get("SPECS", []) log.debug(f"Adding plans for operations: {op_order}") for op in op_order: @@ -354,12 +377,14 @@ def _plan_from_actions(actions, index): # pragma: no cover if not actions[op]: log.trace(f"action {op} has None value") continue - if '_' not in op: - plan.append((PRINT, '%sing packages ...' % op.capitalize())) - elif op.startswith('RM_'): - plan.append((PRINT, 'Pruning %s packages from the cache ...' % op[3:].lower())) + if "_" not in op: + plan.append((PRINT, "%sing packages ..." % op.capitalize())) + elif op.startswith("RM_"): + plan.append( + (PRINT, "Pruning %s packages from the cache ..." 
% op[3:].lower()) + ) if op in PROGRESS_COMMANDS: - plan.append((PROGRESS, '%d' % len(actions[op]))) + plan.append((PROGRESS, "%d" % len(actions[op]))) for arg in actions[op]: log.debug(f"appending value {arg} for action {op}") plan.append((op, arg)) @@ -371,12 +396,21 @@ def _plan_from_actions(actions, index): # pragma: no cover def _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs): # pragma: no cover from os.path import isdir - from .models.dist import Dist - from .instructions import LINK, PROGRESSIVEFETCHEXTRACT, UNLINK, UNLINKLINKTRANSACTION - from .core.package_cache_data import ProgressiveFetchExtract + from .core.link import PrefixSetup, UnlinkLinkTransaction + from .core.package_cache_data import ProgressiveFetchExtract + from .instructions import ( + LINK, + PROGRESSIVEFETCHEXTRACT, + UNLINK, + UNLINKLINKTRANSACTION, + ) + from .models.dist import Dist + # this is only used for conda-build at this point - first_unlink_link_idx = next((q for q, p in enumerate(plan) if p[0] in (UNLINK, LINK)), -1) + first_unlink_link_idx = next( + (q for q, p in enumerate(plan) if p[0] in (UNLINK, LINK)), -1 + ) if first_unlink_link_idx >= 0: grouped_instructions = groupby(lambda x: x[0], plan) unlink_dists = tuple(Dist(d[1]) for d in grouped_instructions.get(UNLINK, ())) @@ -396,9 +430,11 @@ def _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs): # pragma: n pfe.prepare() stp = PrefixSetup(prefix, unlink_precs, link_precs, (), specs, ()) - plan.insert(first_unlink_link_idx, (UNLINKLINKTRANSACTION, UnlinkLinkTransaction(stp))) + plan.insert( + first_unlink_link_idx, (UNLINKLINKTRANSACTION, UnlinkLinkTransaction(stp)) + ) plan.insert(first_unlink_link_idx, (PROGRESSIVEFETCHEXTRACT, pfe)) - elif axn in ('INSTALL', 'CREATE'): + elif axn in ("INSTALL", "CREATE"): plan.insert(0, (UNLINKLINKTRANSACTION, (prefix, (), (), (), specs))) return plan @@ -406,6 +442,7 @@ def _inject_UNLINKLINKTRANSACTION(plan, index, prefix, axn, specs): # pragma: n def _handle_menuinst(unlink_dists, link_dists): # pragma: no cover from .common.compat import on_win + if not on_win: return unlink_dists, link_dists @@ -413,7 +450,9 @@ def _handle_menuinst(unlink_dists, link_dists): # pragma: no cover # package tries to import it to create/remove a shortcut # unlink - menuinst_idx = next((q for q, d in enumerate(unlink_dists) if d.name == 'menuinst'), None) + menuinst_idx = next( + (q for q, d in enumerate(unlink_dists) if d.name == "menuinst"), None + ) if menuinst_idx is not None: unlink_dists = ( *unlink_dists[:menuinst_idx], @@ -422,7 +461,9 @@ def _handle_menuinst(unlink_dists, link_dists): # pragma: no cover ) # link - menuinst_idx = next((q for q, d in enumerate(link_dists) if d.name == 'menuinst'), None) + menuinst_idx = next( + (q for q, d in enumerate(link_dists) if d.name == "menuinst"), None + ) if menuinst_idx is not None: link_dists = ( *link_dists[menuinst_idx : menuinst_idx + 1], @@ -434,20 +475,37 @@ def _handle_menuinst(unlink_dists, link_dists): # pragma: no cover @time_recorder("install_actions") -def install_actions(prefix, index, specs, force=False, only_names=None, always_copy=False, - pinned=True, update_deps=True, prune=False, - channel_priority_map=None, is_update=False, - minimal_hint=False): # pragma: no cover +def install_actions( + prefix, + index, + specs, + force=False, + only_names=None, + always_copy=False, + pinned=True, + update_deps=True, + prune=False, + channel_priority_map=None, + is_update=False, + minimal_hint=False, +): # pragma: no cover # this is for conda-build 
- with env_vars({ - 'CONDA_ALLOW_NON_CHANNEL_URLS': 'true', - 'CONDA_SOLVER_IGNORE_TIMESTAMPS': 'false', - }, stack_callback=stack_context_default): + with env_vars( + { + "CONDA_ALLOW_NON_CHANNEL_URLS": "true", + "CONDA_SOLVER_IGNORE_TIMESTAMPS": "false", + }, + stack_callback=stack_context_default, + ): from os.path import basename + from .models.channel import Channel from .models.dist import Dist + if channel_priority_map: - channel_names = IndexedSet(Channel(url).canonical_name for url in channel_priority_map) + channel_names = IndexedSet( + Channel(url).canonical_name for url in channel_priority_map + ) channels = IndexedSet(Channel(cn) for cn in channel_names) subdirs = IndexedSet(basename(url) for url in channel_priority_map) else: @@ -455,15 +513,19 @@ def install_actions(prefix, index, specs, force=False, only_names=None, always_c if LAST_CHANNEL_URLS: channel_priority_map = prioritize_channels(LAST_CHANNEL_URLS) channels = IndexedSet(Channel(url) for url in channel_priority_map) - subdirs = IndexedSet( - subdir for subdir in (c.subdir for c in channels) if subdir - ) or context.subdirs + subdirs = ( + IndexedSet( + subdir for subdir in (c.subdir for c in channels) if subdir + ) + or context.subdirs + ) else: channels = subdirs = None specs = tuple(MatchSpec(spec) for spec in specs) from .core.prefix_data import PrefixData + PrefixData._cache_.clear() solver_backend = context.plugin_manager.get_cached_solver_backend() @@ -473,20 +535,40 @@ def install_actions(prefix, index, specs, force=False, only_names=None, always_c txn = solver.solve_for_transaction(prune=prune, ignore_pinned=not pinned) prefix_setup = txn.prefix_setups[prefix] actions = get_blank_actions(prefix) - actions['UNLINK'].extend(Dist(prec) for prec in prefix_setup.unlink_precs) - actions['LINK'].extend(Dist(prec) for prec in prefix_setup.link_precs) + actions["UNLINK"].extend(Dist(prec) for prec in prefix_setup.unlink_precs) + actions["LINK"].extend(Dist(prec) for prec in prefix_setup.link_precs) return actions def get_blank_actions(prefix): # pragma: no cover from collections import defaultdict - from .instructions import (CHECK_EXTRACT, CHECK_FETCH, EXTRACT, FETCH, LINK, PREFIX, - RM_EXTRACTED, RM_FETCHED, SYMLINK_CONDA, UNLINK) + + from .instructions import ( + CHECK_EXTRACT, + CHECK_FETCH, + EXTRACT, + FETCH, + LINK, + PREFIX, + RM_EXTRACTED, + RM_FETCHED, + SYMLINK_CONDA, + UNLINK, + ) + actions = defaultdict(list) actions[PREFIX] = prefix - actions['op_order'] = (CHECK_FETCH, RM_FETCHED, FETCH, CHECK_EXTRACT, - RM_EXTRACTED, EXTRACT, - UNLINK, LINK, SYMLINK_CONDA) + actions["op_order"] = ( + CHECK_FETCH, + RM_FETCHED, + FETCH, + CHECK_EXTRACT, + RM_EXTRACTED, + EXTRACT, + UNLINK, + LINK, + SYMLINK_CONDA, + ) return actions @@ -499,7 +581,9 @@ def execute_plan(old_plan, index=None, verbose=False): # pragma: no cover execute_instructions(plan, index, verbose) -def execute_instructions(plan, index=None, verbose=False, _commands=None): # pragma: no cover +def execute_instructions( + plan, index=None, verbose=False, _commands=None +): # pragma: no cover """Execute the instructions in the plan :param plan: A list of (instruction, arg) tuples @@ -508,34 +592,35 @@ def execute_instructions(plan, index=None, verbose=False, _commands=None): # pr :param _commands: (For testing only) dict mapping an instruction to executable if None then the default commands will be used """ - from .instructions import commands, PROGRESS_COMMANDS from .base.context import context + from .instructions import PROGRESS_COMMANDS, commands 
from .models.dist import Dist + if _commands is None: _commands = commands log.debug("executing plan %s", plan) - state = {'i': None, 'prefix': context.root_prefix, 'index': index} + state = {"i": None, "prefix": context.root_prefix, "index": index} for instruction, arg in plan: + log.debug(" %s(%r)", instruction, arg) - log.debug(' %s(%r)', instruction, arg) - - if state['i'] is not None and instruction in PROGRESS_COMMANDS: - state['i'] += 1 - getLogger('progress.update').info((Dist(arg).dist_name, - state['i'] - 1)) + if state["i"] is not None and instruction in PROGRESS_COMMANDS: + state["i"] += 1 + getLogger("progress.update").info((Dist(arg).dist_name, state["i"] - 1)) cmd = _commands[instruction] if callable(cmd): cmd(state, arg) - if (state['i'] is not None and instruction in PROGRESS_COMMANDS - and state['maxval'] == state['i']): - - state['i'] = None - getLogger('progress.stop').info(None) + if ( + state["i"] is not None + and instruction in PROGRESS_COMMANDS + and state["maxval"] == state["i"] + ): + state["i"] = None + getLogger("progress.stop").info(None) def _update_old_plan(old_plan): # pragma: no cover @@ -545,19 +630,22 @@ def _update_old_plan(old_plan): # pragma: no cover """ plan = [] for line in old_plan: - if line.startswith('#'): + if line.startswith("#"): continue - if ' ' not in line: + if " " not in line: from .exceptions import ArgumentError - raise ArgumentError("The instruction '%s' takes at least" - " one argument" % line) - instruction, arg = line.split(' ', 1) + raise ArgumentError( + "The instruction '%s' takes at least" " one argument" % line + ) + + instruction, arg = line.split(" ", 1) plan.append((instruction, arg)) return plan -if __name__ == '__main__': +if __name__ == "__main__": # for testing new revert_actions() only from pprint import pprint + pprint(dict(revert_actions(sys.prefix, int(sys.argv[1])))) diff --git a/conda/plugins/hookspec.py b/conda/plugins/hookspec.py index e1a7789eb25..e6e5aee288a 100644 --- a/conda/plugins/hookspec.py +++ b/conda/plugins/hookspec.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from __future__ import annotations from collections.abc import Iterable diff --git a/conda/plugins/manager.py b/conda/plugins/manager.py index 6e70f7308f3..d355cf2aee5 100644 --- a/conda/plugins/manager.py +++ b/conda/plugins/manager.py @@ -8,12 +8,12 @@ import pluggy -from . import solvers, virtual_packages -from .hookspec import CondaSpecs, spec_name from ..auxlib.ish import dals from ..base.context import context from ..core.solve import Solver from ..exceptions import CondaValueError, PluginError +from . import solvers, virtual_packages +from .hookspec import CondaSpecs, spec_name log = logging.getLogger(__name__) @@ -23,6 +23,7 @@ class CondaPluginManager(pluggy.PluginManager): The conda plugin manager to implement behavior additional to pluggy's default plugin manager. """ + #: Cached version of the :meth:`~conda.plugins.manager.CondaPluginManager.get_solver_backend` #: method. 
get_cached_solver_backend = None @@ -34,7 +35,9 @@ def __init__(self, project_name: str | None = None, *args, **kwargs) -> None: super().__init__(project_name, *args, **kwargs) # Make the cache containers local to the instances so that the # reference from cache to the instance gets garbage collected with the instance - self.get_cached_solver_backend = functools.lru_cache(maxsize=None)(self.get_solver_backend) + self.get_cached_solver_backend = functools.lru_cache(maxsize=None)( + self.get_solver_backend + ) def load_plugins(self, *plugins) -> list[str]: """ @@ -54,9 +57,7 @@ def load_plugins(self, *plugins) -> list[str]: plugin_names.append(plugin_name) return plugin_names - def load_entrypoints( - self, group: str, name: str | None = None - ) -> int: + def load_entrypoints(self, group: str, name: str | None = None) -> int: """Load modules from querying the specified setuptools ``group``. :param str group: Entry point group to load plugins. :param str name: If given, loads only plugins with the given ``name``. @@ -81,7 +82,9 @@ def load_entrypoints( # set up after CLI initialization and argument parsing, # meaning that it comes too late to properly render # a traceback - log.warning(f"Could not load conda plugin `{entry_point.name}`:\n\n{err}") + log.warning( + f"Could not load conda plugin `{entry_point.name}`:\n\n{err}" + ) continue self.register(plugin, name=entry_point.name) count += 1 @@ -103,7 +106,9 @@ def get_hook_results(self, name: str) -> list: ) # Check for conflicts seen = set() - conflicts = [plugin for plugin in plugins if plugin.name in seen or seen.add(plugin.name)] + conflicts = [ + plugin for plugin in plugins if plugin.name in seen or seen.add(plugin.name) + ] if conflicts: raise PluginError( dals( diff --git a/conda/plugins/solvers.py b/conda/plugins/solvers.py index 53099490979..fd706303d1e 100644 --- a/conda/plugins/solvers.py +++ b/conda/plugins/solvers.py @@ -1,7 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from . import hookimpl, CondaSolver from ..base.constants import CLASSIC_SOLVER +from . import CondaSolver, hookimpl @hookimpl(tryfirst=True) # make sure the classic solver can't be overwritten diff --git a/conda/plugins/types.py b/conda/plugins/types.py index f008b500ecc..f86dc070250 100644 --- a/conda/plugins/types.py +++ b/conda/plugins/types.py @@ -3,6 +3,7 @@ from __future__ import annotations from typing import Callable, NamedTuple + from ..core.solve import Solver diff --git a/conda/plugins/virtual_packages/cuda.py b/conda/plugins/virtual_packages/cuda.py index 2fb6e55186a..e4330725efa 100644 --- a/conda/plugins/virtual_packages/cuda.py +++ b/conda/plugins/virtual_packages/cuda.py @@ -9,7 +9,7 @@ from contextlib import suppress from ...common.decorators import env_override -from .. import hookimpl, CondaVirtualPackage +from .. import CondaVirtualPackage, hookimpl @env_override("CONDA_OVERRIDE_CUDA", convert_empty_to_none=True) diff --git a/conda/plugins/virtual_packages/windows.py b/conda/plugins/virtual_packages/windows.py index 7fcf980d1ec..6bfc7a623f6 100644 --- a/conda/plugins/virtual_packages/windows.py +++ b/conda/plugins/virtual_packages/windows.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from ...base.context import context from .. 
import CondaVirtualPackage, hookimpl diff --git a/conda/resolve.py b/conda/resolve.py index 00655567fa2..a5bdaf8ab30 100644 --- a/conda/resolve.py +++ b/conda/resolve.py @@ -1,26 +1,36 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from collections import defaultdict, deque import copy import itertools +from collections import defaultdict, deque from functools import lru_cache from logging import DEBUG, getLogger +from tqdm import tqdm + from conda.common.iterators import groupby_to_dict as groupby -from .auxlib.decorators import memoizemethod from ._vendor.frozendict import FrozenOrderedDict as frozendict -from tqdm import tqdm -from .base.constants import ChannelPriority, MAX_CHANNEL_PRIORITY, SatSolverChoice +from .auxlib.decorators import memoizemethod +from .base.constants import MAX_CHANNEL_PRIORITY, ChannelPriority, SatSolverChoice from .base.context import context from .common.compat import on_win from .common.io import dashlist, time_recorder -from .common.logic import (Clauses, PycoSatSolver, PyCryptoSatSolver, PySatSolver, TRUE, - minimal_unsatisfiable_subset) +from .common.logic import ( + TRUE, + Clauses, + PycoSatSolver, + PyCryptoSatSolver, + PySatSolver, + minimal_unsatisfiable_subset, +) from .common.toposort import toposort -from .exceptions import (CondaDependencyError, InvalidSpec, ResolvePackageNotFound, - UnsatisfiableError) +from .exceptions import ( + CondaDependencyError, + InvalidSpec, + ResolvePackageNotFound, + UnsatisfiableError, +) from .models.channel import Channel, MultiChannel from .models.enums import NoarchType, PackageType from .models.match_spec import MatchSpec @@ -28,7 +38,7 @@ from .models.version import VersionOrder log = getLogger(__name__) -stdoutlog = getLogger('conda.stdoutlog') +stdoutlog = getLogger("conda.stdoutlog") # used in conda build Unsatisfiable = UnsatisfiableError @@ -55,7 +65,9 @@ def try_out_solver(sat_solver): try: try_out_solver(sat_solver) except Exception as e: - log.warning("Could not run SAT solver through interface '%s'.", sat_solver_choice) + log.warning( + "Could not run SAT solver through interface '%s'.", sat_solver_choice + ) log.debug("SAT interface error due to: %s", e, exc_info=True) else: log.debug("Using SAT solver interface '%s'.", sat_solver_choice) @@ -64,12 +76,17 @@ def try_out_solver(sat_solver): try: try_out_solver(sat_solver) except Exception as e: - log.debug("Attempted SAT interface '%s' but unavailable due to: %s", - sat_solver_choice, e) + log.debug( + "Attempted SAT interface '%s' but unavailable due to: %s", + sat_solver_choice, + e, + ) else: log.debug("Falling back to SAT solver interface '%s'.", sat_solver_choice) return sat_solver - raise CondaDependencyError("Cannot run solver. No functioning SAT implementations available.") + raise CondaDependencyError( + "Cannot run solver. No functioning SAT implementations available." 
+ ) def exactness_and_number_of_deps(resolve_obj, ms): @@ -89,12 +106,13 @@ def exactness_and_number_of_deps(resolve_obj, ms): class Resolve: - def __init__(self, index, processed=False, channels=()): self.index = index self.channels = channels - self._channel_priorities_map = self._make_channel_priorities(channels) if channels else {} + self._channel_priorities_map = ( + self._make_channel_priorities(channels) if channels else {} + ) self._channel_priority = context.channel_priority self._solver_ignore_timestamps = context.solver_ignore_timestamps @@ -119,8 +137,14 @@ def __init__(self, index, processed=False, channels=()): self._pool_cache = {} self._strict_channel_cache = {} - self._system_precs = {_ for _ in index if ( - hasattr(_, 'package_type') and _.package_type == PackageType.VIRTUAL_SYSTEM)} + self._system_precs = { + _ + for _ in index + if ( + hasattr(_, "package_type") + and _.package_type == PackageType.VIRTUAL_SYSTEM + ) + } # sorting these in reverse order is effectively prioritizing # constraint behavior from newer packages. It is applying broadening @@ -151,7 +175,9 @@ def default_filter(self, features=None, filter=None): else: filter.clear() - filter.update({make_feature_record(fstr): False for fstr in self.trackers.keys()}) + filter.update( + {make_feature_record(fstr): False for fstr in self.trackers.keys()} + ) if features: filter.update({make_feature_record(fstr): True for fstr in features}) return filter @@ -172,12 +198,16 @@ def valid(self, spec_or_prec, filter, optional=True): If filter is supplied and update is True, it will be updated with the search results. """ + def v_(spec): return v_ms_(spec) if isinstance(spec, MatchSpec) else v_fkey_(spec) def v_ms_(ms): - return (optional and ms.optional - or any(v_fkey_(fkey) for fkey in self.find_matches(ms))) + return ( + optional + and ms.optional + or any(v_fkey_(fkey) for fkey in self.find_matches(ms)) + ) def v_fkey_(prec): val = filter.get(prec) @@ -203,8 +233,10 @@ def is_valid(_spec_or_prec): @memoizemethod def is_valid_spec(_spec): - return optional and _spec.optional or any( - is_valid_prec(_prec) for _prec in self.find_matches(_spec) + return ( + optional + and _spec.optional + or any(is_valid_prec(_prec) for _prec in self.find_matches(_spec)) ) def is_valid_prec(prec): @@ -212,11 +244,15 @@ def is_valid_prec(prec): if val is None: filter_out[prec] = False try: - has_valid_deps = all(is_valid_spec(ms) for ms in self.ms_depends(prec)) + has_valid_deps = all( + is_valid_spec(ms) for ms in self.ms_depends(prec) + ) except InvalidSpec: val = filter_out[prec] = "invalid dep specs" else: - val = filter_out[prec] = False if has_valid_deps else "invalid depends specs" + val = filter_out[prec] = ( + False if has_valid_deps else "invalid depends specs" + ) return not val return is_valid(spec_or_prec) @@ -236,6 +272,7 @@ def invalid_chains(self, spec, filter, optional=True): Returns: A tuple of tuples, empty if the MatchSpec is valid. 
""" + def chains_(spec, names): if spec.name in names: return @@ -275,60 +312,86 @@ def verify_specs(self, specs): bad_deps = [] feature_names = set() for ms in specs: - _feature_names = ms.get_exact_value('track_features') + _feature_names = ms.get_exact_value("track_features") if _feature_names: feature_names.update(_feature_names) else: non_tf_specs.append(ms) - bad_deps.extend((spec, ) for spec in non_tf_specs if (not spec.optional and - not self.find_matches(spec))) + bad_deps.extend( + (spec,) + for spec in non_tf_specs + if (not spec.optional and not self.find_matches(spec)) + ) if bad_deps: raise ResolvePackageNotFound(bad_deps) return tuple(non_tf_specs), feature_names - def _classify_bad_deps(self, bad_deps, specs_to_add, history_specs, strict_channel_priority): - classes = {'python': set(), - 'request_conflict_with_history': set(), - 'direct': set(), - 'virtual_package': set(), - } + def _classify_bad_deps( + self, bad_deps, specs_to_add, history_specs, strict_channel_priority + ): + classes = { + "python": set(), + "request_conflict_with_history": set(), + "direct": set(), + "virtual_package": set(), + } specs_to_add = {MatchSpec(_) for _ in specs_to_add or []} history_specs = {MatchSpec(_) for _ in history_specs or []} for chain in bad_deps: # sometimes chains come in as strings - if len(chain) > 1 and chain[-1].name == 'python' and \ - not any(_.name == 'python' for _ in specs_to_add) and \ - any(_[0] for _ in bad_deps if _[0].name == 'python'): - python_first_specs = [_[0] for _ in bad_deps if _[0].name == 'python'] + if ( + len(chain) > 1 + and chain[-1].name == "python" + and not any(_.name == "python" for _ in specs_to_add) + and any(_[0] for _ in bad_deps if _[0].name == "python") + ): + python_first_specs = [_[0] for _ in bad_deps if _[0].name == "python"] if python_first_specs: python_spec = python_first_specs[0] - if not (set(self.find_matches(python_spec)) & - set(self.find_matches(chain[-1]))): - classes['python'].add((tuple([chain[0], chain[-1]]), - str(MatchSpec(python_spec, target=None)))) - elif chain[-1].name.startswith('__'): + if not ( + set(self.find_matches(python_spec)) + & set(self.find_matches(chain[-1])) + ): + classes["python"].add( + ( + tuple([chain[0], chain[-1]]), + str(MatchSpec(python_spec, target=None)), + ) + ) + elif chain[-1].name.startswith("__"): version = [_ for _ in self._system_precs if _.name == chain[-1].name] - virtual_package_version = version[0].version if version else "not available" - classes['virtual_package'].add((tuple(chain), virtual_package_version)) + virtual_package_version = ( + version[0].version if version else "not available" + ) + classes["virtual_package"].add((tuple(chain), virtual_package_version)) elif chain[0] in specs_to_add: match = False for spec in history_specs: if spec.name == chain[-1].name: - classes['request_conflict_with_history'].add(( - tuple(chain), str(MatchSpec(spec, target=None)))) + classes["request_conflict_with_history"].add( + (tuple(chain), str(MatchSpec(spec, target=None))) + ) match = True if not match: - classes['direct'].add((tuple(chain), str(MatchSpec(chain[0], target=None)))) + classes["direct"].add( + (tuple(chain), str(MatchSpec(chain[0], target=None))) + ) else: - if len(chain) > 1 or any(len(c) >= 1 and c[0] == chain[0] for c in bad_deps): - classes['direct'].add((tuple(chain), - str(MatchSpec(chain[0], target=None)))) - - if classes['python']: + if len(chain) > 1 or any( + len(c) >= 1 and c[0] == chain[0] for c in bad_deps + ): + classes["direct"].add( + (tuple(chain), 
str(MatchSpec(chain[0], target=None))) + ) + + if classes["python"]: # filter out plain single-entry python conflicts. The python section explains these. - classes['direct'] = [_ for _ in classes['direct'] - if _[1].startswith('python ') or len(_[0]) > 1] + classes["direct"] = [ + _ + for _ in classes["direct"] + if _[1].startswith("python ") or len(_[0]) > 1 + ] return classes def find_matches_with_strict(self, ms, strict_channel_priority): @@ -341,15 +404,19 @@ def find_matches_with_strict(self, ms, strict_channel_priority): def find_conflicts(self, specs, specs_to_add=None, history_specs=None): if context.unsatisfiable_hints: if not context.json: - print("\nFound conflicts! Looking for incompatible packages.\n" - "This can take several minutes. Press CTRL-C to abort.") + print( + "\nFound conflicts! Looking for incompatible packages.\n" + "This can take several minutes. Press CTRL-C to abort." + ) bad_deps = self.build_conflict_map(specs, specs_to_add, history_specs) else: bad_deps = {} strict_channel_priority = context.channel_priority == ChannelPriority.STRICT raise UnsatisfiableError(bad_deps, strict=strict_channel_priority) - def breadth_first_search_for_dep_graph(self, root_spec, target_name, dep_graph, num_targets=1): + def breadth_first_search_for_dep_graph( + self, root_spec, target_name, dep_graph, num_targets=1 + ): """Return the shortest path from root_spec to target_name""" queue = [] queue.append([root_spec]) @@ -370,8 +437,9 @@ def breadth_first_search_for_dep_graph(self, root_spec, target_name, dep_graph, else: target_paths.append(path) - found_all_targets = len(target_paths) == num_targets and \ - any(len(_) != len(path) for _ in queue) + found_all_targets = len(target_paths) == num_targets and any( + len(_) != len(path) for _ in queue + ) if len(queue) == 0 or found_all_targets: return target_paths sub_graph = dep_graph @@ -451,12 +519,16 @@ def build_conflict_map(self, specs, specs_to_add=None, history_specs=None): specs = set(self.ms_depends(matches[0])) specs.update({_.to_match_spec() for _ in self._system_precs}) for spec in specs: - self._get_package_pool((spec, )) + self._get_package_pool((spec,)) dep_graph = {} dep_list = {} - with tqdm(total=len(specs), desc="Building graph of deps", - leave=False, disable=context.json) as t: + with tqdm( + total=len(specs), + desc="Building graph of deps", + leave=False, + disable=context.json, + ) as t: for spec in specs: t.set_description(f"Examining {spec}") t.update() @@ -486,16 +558,23 @@ def build_conflict_map(self, specs, specs_to_add=None, history_specs=None): elif k.startswith("__") and any(s for s in set_v if s.name != k): conflicting_pkgs_pkgs[set_v] = [k] - with tqdm(total=len(specs), desc="Determining conflicts", - leave=False, disable=context.json) as t: + with tqdm( + total=len(specs), + desc="Determining conflicts", + leave=False, + disable=context.json, + ) as t: for roots, nodes in conflicting_pkgs_pkgs.items(): - t.set_description("Examining conflict for {}".format( - " ".join(_.name for _ in roots))) + t.set_description( + "Examining conflict for {}".format(" ".join(_.name for _ in roots)) + ) t.update() lroots = [_ for _ in roots] current_shortest_chain = [] shortest_node = None - requested_spec_unsat = frozenset(nodes).intersection({_.name for _ in roots}) + requested_spec_unsat = frozenset(nodes).intersection( + {_.name for _ in roots} + ) if requested_spec_unsat: chains.append([_ for _ in roots if _.name in requested_spec_unsat]) shortest_node = chains[-1][0] @@ -504,26 +583,31 @@ def build_conflict_map(self,
specs, specs_to_add=None, history_specs=None): search_node = shortest_node.name num_occurances = dep_list[search_node].count(root) c = self.breadth_first_search_for_dep_graph( - root, search_node, dep_graph, num_occurances) + root, search_node, dep_graph, num_occurances + ) chains.extend(c) else: for node in nodes: num_occurances = dep_list[node].count(lroots[0]) chain = self.breadth_first_search_for_dep_graph( - lroots[0], node, dep_graph, num_occurances) + lroots[0], node, dep_graph, num_occurances + ) chains.extend(chain) - if len(current_shortest_chain) == 0 or \ - len(chain) < len(current_shortest_chain): + if len(current_shortest_chain) == 0 or len(chain) < len( + current_shortest_chain + ): current_shortest_chain = chain shortest_node = node for root in lroots[1:]: num_occurances = dep_list[shortest_node].count(root) c = self.breadth_first_search_for_dep_graph( - root, shortest_node, dep_graph, num_occurances) + root, shortest_node, dep_graph, num_occurances + ) chains.extend(c) - bad_deps = self._classify_bad_deps(chains, specs_to_add, history_specs, - strict_channel_priority) + bad_deps = self._classify_bad_deps( + chains, specs_to_add, history_specs, strict_channel_priority + ) return bad_deps def _get_strict_channel(self, package_name): @@ -532,10 +616,19 @@ def _get_strict_channel(self, package_name): channel_name = self._strict_channel_cache[package_name] except KeyError: if package_name in self.groups: - all_channel_names = {prec.channel.name for prec in self.groups[package_name]} - by_cp = {self._channel_priorities_map.get(cn, 1): cn for cn in all_channel_names} - highest_priority = sorted(by_cp)[0] # highest priority is the lowest number - channel_name = self._strict_channel_cache[package_name] = by_cp[highest_priority] + all_channel_names = { + prec.channel.name for prec in self.groups[package_name] + } + by_cp = { + self._channel_priorities_map.get(cn, 1): cn + for cn in all_channel_names + } + highest_priority = sorted(by_cp)[ + 0 + ] # highest priority is the lowest number + channel_name = self._strict_channel_cache[package_name] = by_cp[ + highest_priority + ] return channel_name @memoizemethod @@ -557,7 +650,9 @@ def _get_package_pool(self, specs): return pool @time_recorder(module_name=__name__) - def get_reduced_index(self, explicit_specs, sort_by_exactness=True, exit_on_conflict=False): + def get_reduced_index( + self, explicit_specs, sort_by_exactness=True, exit_on_conflict=False + ): # TODO: fix this import; this is bad from .core.subdir_data import make_feature_record @@ -568,27 +663,35 @@ def get_reduced_index(self, explicit_specs, sort_by_exactness=True, exit_on_conf return self._reduced_index_cache[cache_key] if log.isEnabledFor(DEBUG): - log.debug('Retrieving packages for: %s', dashlist( - sorted(str(s) for s in explicit_specs))) + log.debug( + "Retrieving packages for: %s", + dashlist(sorted(str(s) for s in explicit_specs)), + ) explicit_specs, features = self.verify_specs(explicit_specs) - filter_out = {prec: False if val else "feature not enabled" - for prec, val in self.default_filter(features).items()} + filter_out = { + prec: False if val else "feature not enabled" + for prec, val in self.default_filter(features).items() + } snames = set() top_level_spec = None cp_filter_applied = set() # values are package names if sort_by_exactness: # prioritize specs that are more exact. 
Exact specs will evaluate to 3, # constrained specs will evaluate to 2, and name only will be 1 - explicit_specs = sorted(list(explicit_specs), key=lambda x: ( - exactness_and_number_of_deps(self, x), x.dist_str()), reverse=True) + explicit_specs = sorted( + list(explicit_specs), + key=lambda x: (exactness_and_number_of_deps(self, x), x.dist_str()), + reverse=True, + ) # tuple because it needs to be hashable explicit_specs = tuple(explicit_specs) explicit_spec_package_pool = {} for s in explicit_specs: explicit_spec_package_pool[s.name] = explicit_spec_package_pool.get( - s.name, set()) | set(self.find_matches(s)) + s.name, set() + ) | set(self.find_matches(s)) def filter_group(_specs): # all _specs should be for the same package name @@ -610,15 +713,20 @@ def filter_group(_specs): if not filter_out.setdefault(prec, False): nold += 1 if (not self.match_any(_specs, prec)) or ( - explicit_spec_package_pool.get(name) and - prec not in explicit_spec_package_pool[name]): - filter_out[prec] = "incompatible with required spec %s" % top_level_spec + explicit_spec_package_pool.get(name) + and prec not in explicit_spec_package_pool[name] + ): + filter_out[prec] = ( + "incompatible with required spec %s" % top_level_spec + ) continue unsatisfiable_dep_specs = set() for ms in self.ms_depends(prec): if not ms.optional and not any( - rec for rec in self.find_matches(ms) - if not filter_out.get(rec, False)): + rec + for rec in self.find_matches(ms) + if not filter_out.get(rec, False) + ): unsatisfiable_dep_specs.add(ms) if unsatisfiable_dep_specs: filter_out[prec] = "unsatisfiable dependencies %s" % " ".join( @@ -630,7 +738,7 @@ def filter_group(_specs): reduced = nnew < nold if reduced: - log.debug('%s: pruned from %d -> %d' % (name, nold, nnew)) + log.debug("%s: pruned from %d -> %d" % (name, nold, nnew)) if any(ms.optional for ms in _specs): return reduced elif nnew == 0: @@ -645,15 +753,21 @@ def filter_group(_specs): if reduced or name not in snames: snames.add(name) - _dep_specs = groupby(lambda s: s.name, ( - dep_spec - for prec in group if not filter_out.get(prec, False) - for dep_spec in self.ms_depends(prec) if not dep_spec.optional - )) + _dep_specs = groupby( + lambda s: s.name, + ( + dep_spec + for prec in group + if not filter_out.get(prec, False) + for dep_spec in self.ms_depends(prec) + if not dep_spec.optional + ), + ) _dep_specs.pop("*", None) # discard track_features specs - for deps_name, deps in sorted(_dep_specs.items(), - key=lambda x: any(_.optional for _ in x[1])): + for deps_name, deps in sorted( + _dep_specs.items(), key=lambda x: any(_.optional for _ in x[1]) + ): if len(deps) >= nnew: res = filter_group(set(deps)) if res: @@ -684,20 +798,26 @@ def filter_group(_specs): return {} # Determine all valid packages in the dependency graph - reduced_index2 = {prec: prec for prec in (make_feature_record(fstr) for fstr in features)} + reduced_index2 = { + prec: prec for prec in (make_feature_record(fstr) for fstr in features) + } specs_by_name_seed = {} for s in explicit_specs: specs_by_name_seed[s.name] = specs_by_name_seed.get(s.name, []) + [s] for explicit_spec in explicit_specs: add_these_precs2 = tuple( - prec for prec in self.find_matches(explicit_spec) - if prec not in reduced_index2 and self.valid2(prec, filter_out)) + prec + for prec in self.find_matches(explicit_spec) + if prec not in reduced_index2 and self.valid2(prec, filter_out) + ) if strict_channel_priority and add_these_precs2: strict_channel_name = self._get_strict_channel(add_these_precs2[0].name) add_these_precs2 = 
tuple( - prec for prec in add_these_precs2 if prec.channel.name == strict_channel_name + prec + for prec in add_these_precs2 + if prec.channel.name == strict_channel_name ) reduced_index2.update((prec, prec) for prec in add_these_precs2) @@ -712,7 +832,9 @@ def filter_group(_specs): dep_specs = set(self.ms_depends(pkg)) for dep in dep_specs: specs = specs_by_name.get(dep.name, []) - if dep not in specs and (not specs or dep.strictness >= specs[0].strictness): + if dep not in specs and ( + not specs or dep.strictness >= specs[0].strictness + ): specs.insert(0, dep) specs_by_name[dep.name] = specs @@ -722,15 +844,18 @@ def filter_group(_specs): # specs_added = [] ms = dep_specs.pop() seen_specs.add(ms) - for dep_pkg in (_ for _ in self.find_matches(ms) if _ not in reduced_index2): + for dep_pkg in ( + _ for _ in self.find_matches(ms) if _ not in reduced_index2 + ): if not self.valid2(dep_pkg, filter_out): continue # expand the reduced index if not using strict channel priority, # or if using it and this package is in the appropriate channel - if (not strict_channel_priority or - (self._get_strict_channel(dep_pkg.name) == - dep_pkg.channel.name)): + if not strict_channel_priority or ( + self._get_strict_channel(dep_pkg.name) + == dep_pkg.channel.name + ): reduced_index2[dep_pkg] = dep_pkg # recurse to deps of this dep @@ -744,8 +869,12 @@ def filter_group(_specs): # behavior, but keeping these packages out of the # reduced index helps. Of course, if _another_ # package pulls it in by dependency, that's fine. - if "track_features" not in new_ms and not self._broader( - new_ms, tuple(specs_by_name.get(new_ms.name, ())) + if ( + "track_features" not in new_ms + and not self._broader( + new_ms, + tuple(specs_by_name.get(new_ms.name, ())), + ) ): dep_specs.add(new_ms) # if new_ms not in dep_specs: @@ -773,7 +902,7 @@ def filter_group(_specs): if res is not None: return res - spec_name = spec.get_exact_value('name') + spec_name = spec.get_exact_value("name") if spec_name: candidate_precs = self.groups.get(spec_name, ()) elif spec.get_exact_value("track_features"): @@ -799,12 +928,14 @@ def ms_depends(self, prec): def version_key(self, prec, vtype=None): channel = prec.channel - channel_priority = self._channel_priorities_map.get(channel.name, 1) # TODO: ask @mcg1969 why the default value is 1 here # NOQA + channel_priority = self._channel_priorities_map.get( + channel.name, 1 + ) # TODO: ask @mcg1969 why the default value is 1 here # NOQA valid = 1 if channel_priority < MAX_CHANNEL_PRIORITY else 0 - version_comparator = VersionOrder(prec.get('version', '')) - build_number = prec.get('build_number', 0) - build_string = prec.get('build') - noarch = - int(prec.subdir == 'noarch') + version_comparator = VersionOrder(prec.get("version", "")) + build_number = prec.get("build_number", 0) + build_string = prec.get("build") + noarch = -int(prec.subdir == "noarch") if self._channel_priority != ChannelPriority.DISABLED: vkey = [valid, -channel_priority, version_comparator, build_number, noarch] else: @@ -812,7 +943,7 @@ def version_key(self, prec, vtype=None): if self._solver_ignore_timestamps: vkey.append(build_string) else: - vkey.extend((prec.get('timestamp', 0), build_string)) + vkey.extend((prec.get("timestamp", 0), build_string)) return vkey @staticmethod @@ -820,14 +951,18 @@ def _make_channel_priorities(channels): priorities_map = {} for priority_counter, chn in enumerate( itertools.chain.from_iterable( - (Channel(cc)
for cc in c._channels) + if isinstance(c, MultiChannel) + else (c,) for c in (Channel(c) for c in channels) ) ): channel_name = chn.name if channel_name in priorities_map: continue - priorities_map[channel_name] = min(priority_counter, MAX_CHANNEL_PRIORITY - 1) + priorities_map[channel_name] = min( + priority_counter, MAX_CHANNEL_PRIORITY - 1 + ) return priorities_map def get_pkgs(self, ms, emptyok=False): # pragma: no cover @@ -844,7 +979,7 @@ def to_sat_name(val): if isinstance(val, PackageRecord): return val.dist_str() elif isinstance(val, MatchSpec): - return '@s@' + str(val) + ('?' if val.optional else '') + return "@s@" + str(val) + ("?" if val.optional else "") else: raise NotImplementedError() @@ -861,10 +996,12 @@ def push_MatchSpec(self, C, spec): return sat_name simple = spec._is_single() - nm = spec.get_exact_value('name') - tf = frozenset(_tf for _tf in ( - f.strip() for f in spec.get_exact_value('track_features') or () - ) if _tf) + nm = spec.get_exact_value("name") + tf = frozenset( + _tf + for _tf in (f.strip() for f in spec.get_exact_value("track_features") or ()) + if _tf + ) if nm: tgroup = libs = self.groups.get(nm, []) @@ -887,7 +1024,7 @@ def push_MatchSpec(self, C, spec): sat_names = [self.to_sat_name(prec) for prec in libs] if spec.optional: ms2 = MatchSpec(track_features=tf) if tf else MatchSpec(nm) - sat_names.append('!' + self.to_sat_name(ms2)) + sat_names.append("!" + self.to_sat_name(ms2)) m = C.Any(sat_names) C.name_var(m, sat_name) return sat_name @@ -912,11 +1049,13 @@ def gen_clauses(self): nkey = C.Not(self.to_sat_name(prec)) for ms in self.ms_depends(prec): # Virtual packages can't be installed, we ignore them - if not ms.name.startswith('__'): + if not ms.name.startswith("__"): C.Require(C.Or, nkey, self.push_MatchSpec(C, ms)) if log.isEnabledFor(DEBUG): - log.debug("gen_clauses returning with clause count: %d", C.get_clause_count()) + log.debug( + "gen_clauses returning with clause count: %d", C.get_clause_count() + ) return C def generate_spec_constraints(self, C, specs): @@ -924,19 +1063,26 @@ def generate_spec_constraints(self, C, specs): if log.isEnabledFor(DEBUG): log.debug( "generate_spec_constraints returning with clause count: %d", - C.get_clause_count()) + C.get_clause_count(), + ) return result def generate_feature_count(self, C): - result = {self.push_MatchSpec(C, MatchSpec(track_features=name)): 1 - for name in self.trackers.keys()} + result = { + self.push_MatchSpec(C, MatchSpec(track_features=name)): 1 + for name in self.trackers.keys() + } if log.isEnabledFor(DEBUG): log.debug( - "generate_feature_count returning with clause count: %d", C.get_clause_count()) + "generate_feature_count returning with clause count: %d", + C.get_clause_count(), + ) return result def generate_update_count(self, C, specs): - return {'!'+ms.target: 1 for ms in specs if ms.target and C.from_name(ms.target)} + return { + "!" 
+ ms.target: 1 for ms in specs if ms.target and C.from_name(ms.target) + } def generate_feature_metric(self, C): eq = {} # a C.minimize() objective: Dict[varname, coeff] @@ -949,16 +1095,23 @@ def generate_feature_metric(self, C): prec_feats = {self.to_sat_name(prec): set(prec.features) for prec in group} active_feats = set.union(*prec_feats.values()).intersection(self.trackers) for feat in active_feats: - clause_id_for_feature = self.push_MatchSpec(C, MatchSpec(track_features=feat)) + clause_id_for_feature = self.push_MatchSpec( + C, MatchSpec(track_features=feat) + ) for prec_sat_name, features in prec_feats.items(): if feat not in features: - feature_metric_id = self.to_feature_metric_id(prec_sat_name, feat) - C.name_var(C.And(prec_sat_name, clause_id_for_feature), feature_metric_id) + feature_metric_id = self.to_feature_metric_id( + prec_sat_name, feat + ) + C.name_var( + C.And(prec_sat_name, clause_id_for_feature), + feature_metric_id, + ) eq[feature_metric_id] = 1 return eq def generate_removal_count(self, C, specs): - return {'!'+self.push_MatchSpec(C, ms.name): 1 for ms in specs} + return {"!" + self.push_MatchSpec(C, ms.name): 1 for ms in specs} def generate_install_count(self, C, specs): return {self.push_MatchSpec(C, ms.name): 1 for ms in specs if ms.optional} @@ -1054,11 +1207,11 @@ def dependency_sort(self, must_have): # is going to be updated during an operation, the unlink / link order matters. # See issue #6057. - if on_win and 'conda' in digraph: + if on_win and "conda" in digraph: for package_name, dist in must_have.items(): record = self.index.get(prec) - if hasattr(record, 'noarch') and record.noarch == NoarchType.python: - digraph[package_name].add('conda') + if hasattr(record, "noarch") and record.noarch == NoarchType.python: + digraph[package_name].add("conda") sorted_keys = toposort(digraph) must_have = must_have.copy() @@ -1070,7 +1223,7 @@ def dependency_sort(self, must_have): return result def environment_is_consistent(self, installed): - log.debug('Checking if the current environment is consistent') + log.debug("Checking if the current environment is consistent") if not installed: return None, [] sat_name_map = {} # Dict[sat_name, PackageRecord] @@ -1109,14 +1262,17 @@ def mysat(specs, add_if=False): r2 = Resolve(self.index, True, channels=self.channels) C = r2.gen_clauses() # This first result is just a single unsatisfiable core. There may be several. 
- final_unsat_specs = tuple(minimal_unsatisfiable_subset(specs, sat=mysat, - explicit_specs=explicit_specs)) + final_unsat_specs = tuple( + minimal_unsatisfiable_subset( + specs, sat=mysat, explicit_specs=explicit_specs + ) + ) else: final_unsat_specs = None return final_unsat_specs def bad_installed(self, installed, new_specs): - log.debug('Checking if the current environment is consistent') + log.debug("Checking if the current environment is consistent") if not installed: return None, [] sat_name_map = {} # Dict[sat_name, PackageRecord] @@ -1126,11 +1282,11 @@ def bad_installed(self, installed, new_specs): specs.append(MatchSpec(f"{prec.name} {prec.version} {prec.build}")) new_index = {prec: prec for prec in sat_name_map.values()} name_map = {p.name: p for p in new_index} - if 'python' in name_map and 'pip' not in name_map: - python_prec = new_index[name_map['python']] - if 'pip' in python_prec.depends: + if "python" in name_map and "pip" not in name_map: + python_prec = new_index[name_map["python"]] + if "pip" in python_prec.depends: # strip pip dependency from python if not installed in environment - new_deps = [d for d in python_prec.depends if d != 'pip'] + new_deps = [d for d in python_prec.depends if d != "pip"] python_prec.depends = new_deps r2 = Resolve(new_index, True, channels=self.channels) C = r2.gen_clauses() @@ -1138,31 +1294,40 @@ def bad_installed(self, installed, new_specs): solution = C.sat(constraints) limit = xtra = None if not solution or xtra: + def get_(name, snames): if name not in snames: snames.add(name) for fn in self.groups.get(name, []): for ms in self.ms_depends(fn): get_(ms.name, snames) + # New addition: find the largest set of installed packages that # are consistent with each other, and include those in the # list of packages to maintain consistency with snames = set() eq_optional_c = r2.generate_removal_count(C, specs) solution, _ = C.minimize(eq_optional_c, C.sat()) - snames.update(sat_name_map[sat_name]['name'] - for sat_name in (C.from_index(s) for s in solution) - if sat_name and sat_name[0] != '!' and '@' not in sat_name) + snames.update( + sat_name_map[sat_name]["name"] + for sat_name in (C.from_index(s) for s in solution) + if sat_name and sat_name[0] != "!" 
and "@" not in sat_name + ) # Existing behavior: keep all specs and their dependencies for spec in new_specs: get_(MatchSpec(spec).name, snames) if len(snames) < len(sat_name_map): limit = snames - xtra = [rec for sat_name, rec in sat_name_map.items() - if rec['name'] not in snames] - log.debug('Limiting solver to the following packages: %s', ', '.join(limit)) + xtra = [ + rec + for sat_name, rec in sat_name_map.items() + if rec["name"] not in snames + ] + log.debug( + "Limiting solver to the following packages: %s", ", ".join(limit) + ) if xtra: - log.debug('Packages to be preserved: %s', xtra) + log.debug("Packages to be preserved: %s", xtra) return limit, xtra def restore_bad(self, pkgs, preserve): @@ -1173,7 +1338,7 @@ def restore_bad(self, pkgs, preserve): def install_specs(self, specs, installed, update_deps=True): specs = list(map(MatchSpec, specs)) snames = {s.name for s in specs} - log.debug('Checking satisfiability of current install') + log.debug("Checking satisfiability of current install") limit, preserve = self.bad_installed(installed, specs) for prec in installed: if prec not in self.index: @@ -1189,8 +1354,9 @@ def install_specs(self, specs, installed, update_deps=True): # TODO: fix target here spec = MatchSpec(name=name, target=prec.dist_str()) else: - spec = MatchSpec(name=name, version=version, - build=build, channel=schannel) + spec = MatchSpec( + name=name, version=version, build=build, channel=schannel + ) specs.insert(0, spec) return tuple(specs), preserve @@ -1214,7 +1380,7 @@ def remove_specs(self, specs, installed): # these matches will never match an actual package. Combined with # optional=True, this has the effect of forcing their removal. if s._is_single(): - nspecs.append(MatchSpec(s, version='@', optional=True)) + nspecs.append(MatchSpec(s, version="@", optional=True)) else: nspecs.append(MatchSpec(s, optional=True)) snames = {s.name for s in nspecs if s.name} @@ -1228,10 +1394,14 @@ def remove_specs(self, specs, installed): preserve.append(prec) else: # TODO: fix target here - nspecs.append(MatchSpec(name=nm, - version='>='+ver if ver else None, - optional=True, - target=prec.dist_str())) + nspecs.append( + MatchSpec( + name=nm, + version=">=" + ver if ver else None, + optional=True, + target=prec.dist_str(), + ) + ) return nspecs, preserve def remove(self, specs, installed): @@ -1241,8 +1411,15 @@ def remove(self, specs, installed): return pkgs @time_recorder(module_name=__name__) - def solve(self, specs, returnall=False, _remove=False, specs_to_add=None, history_specs=None, - should_retry_solve=False): + def solve( + self, + specs, + returnall=False, + _remove=False, + specs_to_add=None, + history_specs=None, + should_retry_solve=False, + ): # type: (List[str], bool) -> List[PackageRecord] if specs and not isinstance(specs[0], MatchSpec): @@ -1250,10 +1427,11 @@ def solve(self, specs, returnall=False, _remove=False, specs_to_add=None, histor specs = set(specs) if log.isEnabledFor(DEBUG): - dlist = dashlist(str( - '%i: %s target=%s optional=%s' % (i, s, s.target, s.optional)) - for i, s in enumerate(specs)) - log.debug('Solving for: %s', dlist) + dlist = dashlist( + str("%i: %s target=%s optional=%s" % (i, s, s.target, s.optional)) + for i, s in enumerate(specs) + ) + log.debug("Solving for: %s", dlist) if not specs: return () @@ -1263,7 +1441,8 @@ def solve(self, specs, returnall=False, _remove=False, specs_to_add=None, histor len0 = len(specs) reduced_index = self.get_reduced_index( - specs, exit_on_conflict=not context.unsatisfiable_hints) + specs, 
exit_on_conflict=not context.unsatisfiable_hints + ) if not reduced_index: # something is intrinsically unsatisfiable - either not found or # not the right version @@ -1278,7 +1457,9 @@ def solve(self, specs, returnall=False, _remove=False, specs_to_add=None, histor if not_found_packages: raise ResolvePackageNotFound(not_found_packages) elif wrong_version_packages: - raise UnsatisfiableError([[d] for d in wrong_version_packages], chains=False) + raise UnsatisfiableError( + [[d] for d in wrong_version_packages], chains=False + ) if should_retry_solve: # We don't want to call find_conflicts until our last try. # This jumps back out to conda/cli/install.py, where the @@ -1296,11 +1477,14 @@ def mysat(specs, add_if=False): # Return a solution of packages def clean(sol): - return [q for q in (C.from_index(s) for s in sol) - if q and q[0] != '!' and '@' not in q] + return [ + q + for q in (C.from_index(s) for s in sol) + if q and q[0] != "!" and "@" not in q + ] def is_converged(solution): - """ Determine if the SAT problem has converged to a single solution. + """Determine if the SAT problem has converged to a single solution. This is determined by testing for a SAT solution with the current clause set and a clause in which at least one of the packages in @@ -1339,30 +1523,32 @@ def is_converged(solution): speca.extend(MatchSpec(s) for s in specm) if log.isEnabledFor(DEBUG): - log.debug('Requested specs: %s', dashlist(sorted(str(s) for s in specr))) - log.debug('Optional specs: %s', dashlist(sorted(str(s) for s in speco))) - log.debug('All other specs: %s', dashlist(sorted(str(s) for s in speca))) - log.debug('missing specs: %s', dashlist(sorted(str(s) for s in specm))) + log.debug("Requested specs: %s", dashlist(sorted(str(s) for s in specr))) + log.debug("Optional specs: %s", dashlist(sorted(str(s) for s in speco))) + log.debug("All other specs: %s", dashlist(sorted(str(s) for s in speca))) + log.debug("missing specs: %s", dashlist(sorted(str(s) for s in specm))) # Removed packages: minimize count log.debug("Solve: minimize removed packages") if _remove: eq_optional_c = r2.generate_removal_count(C, speco) solution, obj7 = C.minimize(eq_optional_c, solution) - log.debug('Package removal metric: %d', obj7) + log.debug("Package removal metric: %d", obj7) # Requested packages: maximize versions log.debug("Solve: maximize versions of requested packages") - eq_req_c, eq_req_v, eq_req_b, eq_req_a, eq_req_t = r2.generate_version_metrics(C, specr) + eq_req_c, eq_req_v, eq_req_b, eq_req_a, eq_req_t = r2.generate_version_metrics( + C, specr + ) solution, obj3a = C.minimize(eq_req_c, solution) solution, obj3 = C.minimize(eq_req_v, solution) - log.debug('Initial package channel/version metric: %d/%d', obj3a, obj3) + log.debug("Initial package channel/version metric: %d/%d", obj3a, obj3) # Track features: minimize feature count log.debug("Solve: minimize track_feature count") eq_feature_count = r2.generate_feature_count(C) solution, obj1 = C.minimize(eq_feature_count, solution) - log.debug('Track feature count: %d', obj1) + log.debug("Track feature count: %d", obj1) # Featured packages: minimize number of featureless packages # installed when a featured alternative is feasible. @@ -1373,55 +1559,62 @@ def is_converged(solution): # considered "featureless." 
eq_feature_metric = r2.generate_feature_metric(C) solution, obj2 = C.minimize(eq_feature_metric, solution) - log.debug('Package misfeature count: %d', obj2) + log.debug("Package misfeature count: %d", obj2) # Requested packages: maximize builds log.debug("Solve: maximize build numbers of requested packages") solution, obj4 = C.minimize(eq_req_b, solution) - log.debug('Initial package build metric: %d', obj4) + log.debug("Initial package build metric: %d", obj4) # prefer arch packages where available for requested specs log.debug("Solve: prefer arch over noarch for requested packages") solution, noarch_obj = C.minimize(eq_req_a, solution) - log.debug('Noarch metric: %d', noarch_obj) + log.debug("Noarch metric: %d", noarch_obj) # Optional installations: minimize count if not _remove: log.debug("Solve: minimize number of optional installations") eq_optional_install = r2.generate_install_count(C, speco) solution, obj49 = C.minimize(eq_optional_install, solution) - log.debug('Optional package install metric: %d', obj49) + log.debug("Optional package install metric: %d", obj49) # Dependencies: minimize the number of packages that need upgrading log.debug("Solve: minimize number of necessary upgrades") eq_u = r2.generate_update_count(C, speca) solution, obj50 = C.minimize(eq_u, solution) - log.debug('Dependency update count: %d', obj50) + log.debug("Dependency update count: %d", obj50) # Remaining packages: maximize versions, then builds - log.debug("Solve: maximize versions and builds of indirect dependencies. " - "Prefer arch over noarch where equivalent.") + log.debug( + "Solve: maximize versions and builds of indirect dependencies. " + "Prefer arch over noarch where equivalent." + ) eq_c, eq_v, eq_b, eq_a, eq_t = r2.generate_version_metrics(C, speca) solution, obj5a = C.minimize(eq_c, solution) solution, obj5 = C.minimize(eq_v, solution) solution, obj6 = C.minimize(eq_b, solution) solution, obj6a = C.minimize(eq_a, solution) - log.debug('Additional package channel/version/build/noarch metrics: %d/%d/%d/%d', - obj5a, obj5, obj6, obj6a) + log.debug( + "Additional package channel/version/build/noarch metrics: %d/%d/%d/%d", + obj5a, + obj5, + obj6, + obj6a, + ) # Prune unnecessary packages log.debug("Solve: prune unnecessary packages") eq_c = r2.generate_package_count(C, specm) solution, obj7 = C.minimize(eq_c, solution, trymax=True) - log.debug('Weak dependency count: %d', obj7) + log.debug("Weak dependency count: %d", obj7) if not is_converged(solution): # Maximize timestamps eq_t.update(eq_req_t) solution, obj6t = C.minimize(eq_t, solution) - log.debug('Timestamp metric: %d', obj6t) + log.debug("Timestamp metric: %d", obj6t) - log.debug('Looking for alternate solutions') + log.debug("Looking for alternate solutions") nsol = 1 psolutions = [] psolution = clean(solution) @@ -1433,7 +1626,7 @@ def is_converged(solution): break nsol += 1 if nsol > 10: - log.debug('Too many solutions; terminating') + log.debug("Too many solutions; terminating") break psolution = clean(solution) psolutions.append(psolution) @@ -1444,11 +1637,14 @@ def is_converged(solution): diffs = [sorted(set(sol) - common) for sol in psols2] if not context.json: stdoutlog.info( - '\nWarning: %s possible package resolutions ' - '(only showing differing packages):%s%s' % - ('>10' if nsol > 10 else nsol, - dashlist(', '.join(diff) for diff in diffs), - '\n ... 
and others' if nsol > 10 else '')) + "\nWarning: %s possible package resolutions " + "(only showing differing packages):%s%s" + % ( + ">10" if nsol > 10 else nsol, + dashlist(", ".join(diff) for diff in diffs), + "\n ... and others" if nsol > 10 else "", + ) + ) # def stripfeat(sol): # return sol.split('[')[0] @@ -1464,4 +1660,6 @@ def is_converged(solution): # for psol in psolutions] # return sorted(Dist(stripfeat(dname)) for dname in psolutions[0]) - return sorted((new_index[sat_name] for sat_name in psolutions[0]), key=lambda x: x.name) + return sorted( + (new_index[sat_name] for sat_name in psolutions[0]), key=lambda x: x.name + ) diff --git a/conda/shell/bin/conda b/conda/shell/bin/conda index be052a3a5cd..8023df28a87 100755 --- a/conda/shell/bin/conda +++ b/conda/shell/bin/conda @@ -2,12 +2,13 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause # WARNING: Not a real conda entry point. Do not use other than for tests. -from os.path import abspath, join import sys +from os.path import abspath, join -_conda_root = abspath(join(__file__, '..', '..', '..', '..')) +_conda_root = abspath(join(__file__, "..", "..", "..", "..")) -if __name__ == '__main__': +if __name__ == "__main__": sys.path.insert(0, _conda_root) from conda.cli import main + sys.exit(main()) diff --git a/conda/shell/etc/profile.d/conda.csh b/conda/shell/etc/profile.d/conda.csh index f4dbde5fd33..7e48cc9cfd8 100644 --- a/conda/shell/etc/profile.d/conda.csh +++ b/conda/shell/etc/profile.d/conda.csh @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - if (! $?_CONDA_EXE) then set _CONDA_EXE="${PWD}/conda/shell/bin/conda" else diff --git a/conda/shell/etc/profile.d/conda.sh b/conda/shell/etc/profile.d/conda.sh index 929e71d919d..60b9b756103 100644 --- a/conda/shell/etc/profile.d/conda.sh +++ b/conda/shell/etc/profile.d/conda.sh @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - __conda_exe() ( "$CONDA_EXE" $_CE_M $_CE_CONDA "$@" ) diff --git a/conda/testing/__init__.py b/conda/testing/__init__.py index 603925489ea..74456c86c77 100644 --- a/conda/testing/__init__.py +++ b/conda/testing/__init__.py @@ -41,7 +41,10 @@ def conda_ensure_sys_python_is_base_env_python(): # So lets just sys.exit on that. if "CONDA_PYTHON_EXE" in os.environ: - if Path(os.environ["CONDA_PYTHON_EXE"]).resolve() != Path(sys.executable).resolve(): + if ( + Path(os.environ["CONDA_PYTHON_EXE"]).resolve() + != Path(sys.executable).resolve() + ): warnings.warn( "ERROR :: Running tests from a non-base Python interpreter. " " Tests requires installing menuinst and that causes stderr " @@ -56,9 +59,10 @@ def conda_ensure_sys_python_is_base_env_python(): def conda_move_to_front_of_PATH(): - if 'CONDA_PREFIX' in os.environ: + if "CONDA_PREFIX" in os.environ: from conda.activate import CmdExeActivator, PosixActivator - if os.name == 'nt': + + if os.name == "nt": activator_cls = CmdExeActivator else: activator_cls = PosixActivator @@ -77,17 +81,17 @@ def conda_move_to_front_of_PATH(): # cannot be used multiple times; it will only remove *one* conda # prefix from the *original* value of PATH, calling it N times will # just return the same value every time, even if you update PATH. 
- p = activator._remove_prefix_from_path(os.environ['CONDA_PREFIX']) + p = activator._remove_prefix_from_path(os.environ["CONDA_PREFIX"]) # Replace any non sys.prefix condabin with sys.prefix condabin new_p = [] found_condabin = False for pe in p: - if pe.endswith('condabin'): + if pe.endswith("condabin"): if not found_condabin: found_condabin = True - if join(sys.prefix, 'condabin') != pe: - condabin_path = join(sys.prefix, 'condabin') + if join(sys.prefix, "condabin") != pe: + condabin_path = join(sys.prefix, "condabin") print(f"Incorrect condabin, swapping {pe} to {condabin_path}") new_p.append(condabin_path) else: @@ -97,12 +101,12 @@ def conda_move_to_front_of_PATH(): new_path = os.pathsep.join(new_p) new_path = encode_for_env_var(new_path) - os.environ['PATH'] = new_path + os.environ["PATH"] = new_path activator = activator_cls() - p = activator._add_prefix_to_path(os.environ['CONDA_PREFIX']) + p = activator._add_prefix_to_path(os.environ["CONDA_PREFIX"]) new_path = os.pathsep.join(p) new_path = encode_for_env_var(new_path) - os.environ['PATH'] = new_path + os.environ["PATH"] = new_path def conda_check_versions_aligned(): @@ -115,20 +119,22 @@ def conda_check_versions_aligned(): # it if it disagrees. import conda - version_file = normpath(join(dirname(conda.__file__), '.version')) + + version_file = normpath(join(dirname(conda.__file__), ".version")) if isfile(version_file): version_from_file = open(version_file).read().split("\n")[0] else: version_from_file = None - git_exe = 'git.exe' if sys.platform == 'win32' else 'git' + git_exe = "git.exe" if sys.platform == "win32" else "git" version_from_git = None - for pe in os.environ.get('PATH', '').split(os.pathsep): + for pe in os.environ.get("PATH", "").split(os.pathsep): if isfile(join(pe, git_exe)): try: - cmd = join(pe, git_exe) + ' describe --tags --long' - version_from_git = check_output(cmd).decode('utf-8').split('\n')[0] + cmd = join(pe, git_exe) + " describe --tags --long" + version_from_git = check_output(cmd).decode("utf-8").split("\n")[0] from conda.auxlib.packaging import _get_version_from_git_tag + version_from_git = _get_version_from_git_tag(version_from_git) break except: @@ -137,7 +143,9 @@ def conda_check_versions_aligned(): print("WARNING :: Could not check versions.") if version_from_git and version_from_git != version_from_file: - print("WARNING :: conda/.version ({}) and git describe ({}) " - "disagree, rewriting .version".format(version_from_git, version_from_file)) - with open(version_file, 'w') as fh: + print( + "WARNING :: conda/.version ({}) and git describe ({}) " + "disagree, rewriting .version".format(version_from_git, version_from_file) + ) + with open(version_file, "w") as fh: fh.write(version_from_git) diff --git a/conda/testing/cases.py b/conda/testing/cases.py index bea725454f5..b2056fd3906 100644 --- a/conda/testing/cases.py +++ b/conda/testing/cases.py @@ -1,7 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import unittest + import pytest diff --git a/conda/testing/fixtures.py b/conda/testing/fixtures.py index bf743383daf..dcbcc59bf46 100644 --- a/conda/testing/fixtures.py +++ b/conda/testing/fixtures.py @@ -1,15 +1,16 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import warnings + import py import pytest -from conda.gateways.disk.create import TemporaryDirectory -from conda.core.subdir_data import SubdirData from conda.auxlib.ish import dals -from conda.base.context import reset_context, context +from conda.base.context import 
context, reset_context from conda.common.configuration import YamlRawParameter from conda.common.serialize import yaml_round_trip_load +from conda.core.subdir_data import SubdirData +from conda.gateways.disk.create import TemporaryDirectory @pytest.fixture(autouse=True) @@ -24,7 +25,7 @@ def suppress_resource_warning(): warnings.filterwarnings("ignore", category=ResourceWarning) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def tmpdir(tmpdir, request): tmpdir = TemporaryDirectory(dir=str(tmpdir)) request.addfinalizer(tmpdir.cleanup) diff --git a/conda/testing/gateways/fixtures.py b/conda/testing/gateways/fixtures.py index c51a947b003..a583c3fb1f0 100644 --- a/conda/testing/gateways/fixtures.py +++ b/conda/testing/gateways/fixtures.py @@ -1,13 +1,12 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json import os import socket from pathlib import Path -import pytest import boto3 +import pytest from botocore.client import Config from xprocess import ProcessStarter @@ -106,7 +105,9 @@ def startup_check(self, port=minio.port): try: s.connect((address, port)) except Exception as e: - print("something's wrong with %s:%d. Exception is %s" % (address, port, e)) + print( + "something's wrong with %s:%d. Exception is %s" % (address, port, e) + ) error = True finally: s.close() diff --git a/conda/testing/helpers.py b/conda/testing/helpers.py index 0bb50bd84ab..0ccef0af554 100644 --- a/conda/testing/helpers.py +++ b/conda/testing/helpers.py @@ -1,27 +1,32 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - """ Helpers for the tests """ -from contextlib import contextmanager -from functools import lru_cache import json import os -from os.path import dirname, join, abspath import re -from conda.auxlib.compat import shlex_split_unicode import sys -from tempfile import gettempdir, mkdtemp -from uuid import uuid4 +from contextlib import contextmanager +from functools import lru_cache +from os.path import abspath, dirname, join from pathlib import Path +from tempfile import gettempdir, mkdtemp from unittest.mock import patch +from uuid import uuid4 + +import pytest + +from conda.auxlib.compat import shlex_split_unicode +from conda_env.cli import main as conda_env_cli from .. import cli -from ..base.context import context, reset_context, conda_tests_ctxt_mgmt_def_pol +from ..base.context import conda_tests_ctxt_mgmt_def_pol, context, reset_context from ..common.compat import encode_arguments -from ..common.io import argv, captured as common_io_captured, env_var +from ..common.io import argv +from ..common.io import captured as common_io_captured +from ..common.io import env_var from ..core.prefix_data import PrefixData from ..core.subdir_data import SubdirData, make_feature_record from ..gateways.disk.delete import rm_rf @@ -29,16 +34,9 @@ from ..gateways.logging import initialize_logging from ..history import History from ..models.channel import Channel -from ..models.records import PackageRecord -from ..models.records import PrefixRecord +from ..models.records import PackageRecord, PrefixRecord from ..resolve import Resolve -from conda_env.cli import main as conda_env_cli - - -import pytest - - # The default value will only work if we have installed conda in development mode! 
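# A brief usage sketch for the lookup below, with hypothetical paths: the
# environment variable is read once, at import time, so redirecting the test
# data only requires exporting it before pytest starts, e.g.
#
#     CONDA_TEST_DATA_DIR=/src/conda/tests/data python -m pytest tests/
#
# The fallback default resolves relative to this file and therefore only works
# for a development (source) checkout, as the comment above notes.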
TEST_DATA_DIR = os.environ.get( "CONDA_TEST_DATA_DIR", abspath(join(dirname(__file__), "..", "..", "tests", "data")) @@ -79,7 +77,9 @@ def captured(disallow_stderr=True): raise Exception("Got stderr output: %s" % c.stderr) -def capture_json_with_argv(command, disallow_stderr=True, ignore_stderr=False, **kwargs): +def capture_json_with_argv( + command, disallow_stderr=True, ignore_stderr=False, **kwargs +): stdout, stderr, exit_code = run_inprocess_conda_command(command, disallow_stderr) if kwargs.get("relaxed"): match = re.match(r"\A.*?({.*})", stdout, re.DOTALL) @@ -136,7 +136,9 @@ def run_inprocess_conda_command(command, disallow_stderr: bool = True): main_func = cli.main # May want to do this to command: - with argv(encode_arguments(shlex_split_unicode(command))), captured(disallow_stderr) as c: + with argv(encode_arguments(shlex_split_unicode(command))), captured( + disallow_stderr + ) as c: initialize_logging() try: exit_code = main_func() @@ -321,7 +323,9 @@ def get_index_r_1(subdir=context.subdir): channel = Channel("https://conda.anaconda.org/channel-1/%s" % subdir) sd = SubdirData(channel) with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", + "false", + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): sd._process_raw_repodata_str(json.dumps(repodata)) sd._loaded = True @@ -351,7 +355,9 @@ def get_index_r_2(subdir=context.subdir): channel = Channel("https://conda.anaconda.org/channel-2/%s" % subdir) sd = SubdirData(channel) with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", + "false", + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): sd._process_raw_repodata_str(json.dumps(repodata)) sd._loaded = True @@ -380,7 +386,9 @@ def get_index_r_4(subdir=context.subdir): channel = Channel("https://conda.anaconda.org/channel-4/%s" % subdir) sd = SubdirData(channel) with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", + "false", + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): sd._process_raw_repodata_str(json.dumps(repodata)) sd._loaded = True @@ -409,7 +417,9 @@ def get_index_r_5(subdir=context.subdir): channel = Channel("https://conda.anaconda.org/channel-5/%s" % subdir) sd = SubdirData(channel) with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", + "true", + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): sd._process_raw_repodata_str(json.dumps(repodata)) sd._loaded = True @@ -507,7 +517,9 @@ def get_index_must_unfreeze(subdir=context.subdir): channel = Channel("https://conda.anaconda.org/channel-freeze/%s" % subdir) sd = SubdirData(channel) with env_var( - "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ADD_PIP_AS_PYTHON_DEPENDENCY", + "false", + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): sd._process_raw_repodata_str(json.dumps(repodata)) sd._loaded = True @@ -572,7 +584,9 @@ def record( @contextmanager -def get_solver(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): +def get_solver( + tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() +): tmpdir = tmpdir.strpath pd = PrefixData(tmpdir) pd._PrefixData__prefix_records = { @@ -601,7 +615,9 @@ def get_solver(tmpdir, specs_to_add=(), 
specs_to_remove=(), prefix_records=(), h @contextmanager -def get_solver_2(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): +def get_solver_2( + tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() +): tmpdir = tmpdir.strpath pd = PrefixData(tmpdir) pd._PrefixData__prefix_records = { @@ -630,7 +646,9 @@ def get_solver_2(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), @contextmanager -def get_solver_4(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): +def get_solver_4( + tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() +): tmpdir = tmpdir.strpath pd = PrefixData(tmpdir) pd._PrefixData__prefix_records = { @@ -659,7 +677,9 @@ def get_solver_4(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), @contextmanager -def get_solver_5(tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=()): +def get_solver_5( + tmpdir, specs_to_add=(), specs_to_remove=(), prefix_records=(), history_specs=() +): tmpdir = tmpdir.strpath pd = PrefixData(tmpdir) pd._PrefixData__prefix_records = { diff --git a/conda/testing/integration.py b/conda/testing/integration.py index 38201fe61fd..1be72914f5e 100644 --- a/conda/testing/integration.py +++ b/conda/testing/integration.py @@ -1,38 +1,30 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - """ These helpers were originally defined in tests/test_create.py, but were refactored here so downstream projects can benefit from them too. """ +import json +import os +import sys from contextlib import contextmanager from functools import lru_cache -import json from logging import getLogger -import os -from os.path import ( - dirname, - exists, - isdir, - join, - lexists, -) +from os.path import dirname, exists, isdir, join, lexists from random import sample from shutil import copyfile, rmtree from subprocess import check_output -import sys from tempfile import gettempdir from uuid import uuid4 - import pytest from conda.auxlib.compat import Utf8NamedTemporaryFile from conda.auxlib.entity import EntityEncoder from conda.base.constants import PACKAGE_CACHE_MAGIC_FILE -from conda.base.context import context, reset_context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context, reset_context from conda.cli.conda_argparse import do_call from conda.cli.main import generate_parser, init_loggers from conda.common.compat import encode_arguments, on_win @@ -44,9 +36,9 @@ env_var, stderr_log_level, ) -from conda.common.url import path_to_url, escape_channel_url -from conda.core.prefix_data import PrefixData +from conda.common.url import escape_channel_url, path_to_url from conda.core.package_cache_data import PackageCacheData +from conda.core.prefix_data import PrefixData from conda.exceptions import conda_exception_handler from conda.gateways.disk.create import mkdir_p from conda.gateways.disk.delete import rm_rf @@ -57,7 +49,6 @@ from conda.models.records import PackageRecord from conda.utils import massage_arguments - TEST_LOG_LEVEL = DEBUG PYTHON_BINARY = "python.exe" if on_win else "bin/python" BIN_DIRECTORY = "Scripts" if on_win else "bin" @@ -131,7 +122,9 @@ def _get_temp_prefix(name=None, use_restricted_unicode=False): random_unicode = "".join(sample(UNICODE_CHARACTERS, len(UNICODE_CHARACTERS))) tmpdir_name = os.environ.get( "CONDA_TEST_TMPDIR_NAME", - (str(uuid4())[:4] + SPACER_CHARACTER + random_unicode) if name is None else name, 
+ (str(uuid4())[:4] + SPACER_CHARACTER + random_unicode) + if name is None + else name, ) prefix = join(tmpdir, tmpdir_name) @@ -170,7 +163,9 @@ def make_temp_prefix(name=None, use_restricted_unicode=False, _temp_prefix=None) ntpath will fall over. """ if not _temp_prefix: - _temp_prefix = _get_temp_prefix(name=name, use_restricted_unicode=use_restricted_unicode) + _temp_prefix = _get_temp_prefix( + name=name, use_restricted_unicode=use_restricted_unicode + ) try: os.makedirs(_temp_prefix) except: @@ -180,7 +175,9 @@ def make_temp_prefix(name=None, use_restricted_unicode=False, _temp_prefix=None) def FORCE_temp_prefix(name=None, use_restricted_unicode=False): - _temp_prefix = _get_temp_prefix(name=name, use_restricted_unicode=use_restricted_unicode) + _temp_prefix = _get_temp_prefix( + name=name, use_restricted_unicode=use_restricted_unicode + ) rm_rf(_temp_prefix) os.makedirs(_temp_prefix) assert isdir(_temp_prefix) @@ -214,7 +211,6 @@ def temp_chdir(target_dir): def run_command(command, prefix, *arguments, **kwargs): - assert isinstance(arguments, tuple), "run_command() arguments must be tuples" arguments = massage_arguments(arguments) @@ -274,8 +270,13 @@ def run_command(command, prefix, *arguments, **kwargs): init_loggers(context) cap_args = () if not kwargs.get("no_capture") else (None, None) # list2cmdline is not exact, but it is only informational. - print("\n\nEXECUTING COMMAND >>> $ conda %s\n\n" % " ".join(arguments), file=sys.stderr) - with stderr_log_level(TEST_LOG_LEVEL, "conda"), stderr_log_level(TEST_LOG_LEVEL, "requests"): + print( + "\n\nEXECUTING COMMAND >>> $ conda %s\n\n" % " ".join(arguments), + file=sys.stderr, + ) + with stderr_log_level(TEST_LOG_LEVEL, "conda"), stderr_log_level( + TEST_LOG_LEVEL, "requests" + ): arguments = encode_arguments(arguments) is_run = arguments[0] == "run" if is_run: @@ -329,7 +330,9 @@ def make_temp_env(*packages, **kwargs): if "CONDA_TEST_SAVE_TEMPS" not in os.environ: rmtree(prefix, ignore_errors=True) else: - log.warning(f"CONDA_TEST_SAVE_TEMPS :: retaining make_temp_env {prefix}") + log.warning( + f"CONDA_TEST_SAVE_TEMPS :: retaining make_temp_env {prefix}" + ) @contextmanager @@ -340,7 +343,9 @@ def make_temp_package_cache(): touch(join(pkgs_dir, PACKAGE_CACHE_MAGIC_FILE)) try: - with env_var("CONDA_PKGS_DIRS", pkgs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_PKGS_DIRS", pkgs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert context.pkgs_dirs == (pkgs_dir,) yield pkgs_dir finally: @@ -357,7 +362,9 @@ def make_temp_channel(packages): with make_temp_env(*package_reqs) as prefix: for package in packages: assert package_is_installed(prefix, package.replace("-", "=")) - data = [p for p in PrefixData(prefix).iter_records() if p["name"] in package_names] + data = [ + p for p in PrefixData(prefix).iter_records() if p["name"] in package_names + ] run_command(Commands.REMOVE, prefix, *package_names) for package in packages: assert not package_is_installed(prefix, package.replace("-", "=")) @@ -440,7 +447,8 @@ def _package_is_installed(prefix, spec): prefix_recs = tuple(PrefixData(prefix).query(spec)) if len(prefix_recs) > 1: raise AssertionError( - "Multiple packages installed.%s" % (dashlist(prec.dist_str() for prec in prefix_recs)) + "Multiple packages installed.%s" + % (dashlist(prec.dist_str() for prec in prefix_recs)) ) return bool(len(prefix_recs)) @@ -449,7 +457,8 @@ def get_conda_list_tuple(prefix, package_name): stdout, stderr, _ = run_command(Commands.LIST, prefix) stdout_lines = 
stdout.split("\n") package_line = next( - (line for line in stdout_lines if line.lower().startswith(package_name + " ")), None + (line for line in stdout_lines if line.lower().startswith(package_name + " ")), + None, ) return package_line.split() diff --git a/conda/testing/notices/fixtures.py b/conda/testing/notices/fixtures.py index 05da67a4dd0..97a343cdf35 100644 --- a/conda/testing/notices/fixtures.py +++ b/conda/testing/notices/fixtures.py @@ -1,9 +1,8 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from pathlib import Path - from unittest import mock + import pytest from conda.base.constants import NOTICES_CACHE_SUBDIR @@ -26,7 +25,9 @@ def notices_cache_dir(tmpdir): @pytest.fixture(scope="function") def notices_mock_http_session_get(): - with mock.patch("conda.gateways.connection.session.CondaSession.get") as session_get: + with mock.patch( + "conda.gateways.connection.session.CondaSession.get" + ) as session_get: yield session_get diff --git a/conda/testing/notices/helpers.py b/conda/testing/notices/helpers.py index 6768f259e35..b6cf359e8ae 100644 --- a/conda/testing/notices/helpers.py +++ b/conda/testing/notices/helpers.py @@ -3,19 +3,19 @@ from __future__ import annotations import datetime -import uuid import json import os +import uuid from itertools import chain from pathlib import Path from typing import Sequence from unittest import mock from conda.base.context import Context +from conda.models.channel import get_channel_objs from conda.notices.cache import get_notices_cache_file from conda.notices.core import get_channel_name_and_urls from conda.notices.types import ChannelNoticeResponse -from conda.models.channel import get_channel_objs DEFAULT_NOTICE_MESG = "Here is an example message that will be displayed to users" @@ -80,7 +80,8 @@ def offset_cache_file_mtime(mtime_offset) -> None: """ cache_file = get_notices_cache_file() os.utime( - cache_file, times=(cache_file.stat().st_atime, cache_file.stat().st_mtime - mtime_offset) + cache_file, + times=(cache_file.stat().st_atime, cache_file.stat().st_mtime - mtime_offset), ) @@ -97,7 +98,10 @@ def __init__(self, **kwargs): def notices_decorator_assert_message_in_stdout( - captured, messages: Sequence[str], dummy_mesg: Optional[str] = None, not_in: bool = False + captured, + messages: Sequence[str], + dummy_mesg: Optional[str] = None, + not_in: bool = False, ): """ Tests a run of notices decorator where we expect to see the messages diff --git a/conda/testing/solver_helpers.py b/conda/testing/solver_helpers.py index 1c4ec023401..f5ace1665ff 100644 --- a/conda/testing/solver_helpers.py +++ b/conda/testing/solver_helpers.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from __future__ import annotations import collections @@ -11,16 +10,20 @@ import pytest -from ..exceptions import PackagesNotFoundError, ResolvePackageNotFound, UnsatisfiableError from ..base.context import context from ..core.solve import Solver +from ..exceptions import ( + PackagesNotFoundError, + ResolvePackageNotFound, + UnsatisfiableError, +) from ..models.channel import Channel -from ..models.records import PackageRecord from ..models.match_spec import MatchSpec +from ..models.records import PackageRecord from . import helpers -@functools.lru_cache() +@functools.lru_cache def index_packages(num): """Get the index data of the ``helpers.get_index_r_*`` helpers.""" # XXX: get_index_r_X should probably be refactored to avoid loading the environment like this. 
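The `functools.lru_cache` hunk just above is one of the few non-black changes in this file: since Python 3.8 the decorator may be applied bare, without the trailing call parentheses. A minimal sketch of the equivalent form, using a hypothetical function:

    import functools

    @functools.lru_cache  # bare form, Python 3.8+; same as @functools.lru_cache()
    def fib(n: int) -> int:
        # memoized recursion: each distinct n is computed only once
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    assert fib(32) == 2178309
    assert fib.cache_info().hits > 0  # repeated subcalls were served from the cache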
@@ -111,7 +114,11 @@ def _channel_packages(self): def _package_data(self, record): """Turn record into data, to be written in the JSON environment/repo files.""" - data = {key: value for key, value in vars(record).items() if key in self.REPO_DATA_KEYS} + data = { + key: value + for key, value in vars(record).items() + if key in self.REPO_DATA_KEYS + } if "subdir" not in data: data["subdir"] = context.subdir return data @@ -123,7 +130,9 @@ def _write_installed_packages(self): conda_meta.mkdir(exist_ok=True, parents=True) # write record files for record in self.installed_packages: - record_path = conda_meta / f"{record.name}-{record.version}-{record.build}.json" + record_path = ( + conda_meta / f"{record.name}-{record.version}-{record.build}.json" + ) record_data = self._package_data(record) record_data["channel"] = record.channel.name record_path.write_text(json.dumps(record_data)) @@ -218,7 +227,9 @@ def assert_unsatisfiable(self, exc_info, entries): assert issubclass(exc_info.type, UnsatisfiableError) if exc_info.type is UnsatisfiableError: assert ( - sorted(tuple(map(str, entries)) for entries in exc_info.value.unsatisfiable) + sorted( + tuple(map(str, entries)) for entries in exc_info.value.unsatisfiable + ) == entries ) @@ -265,7 +276,9 @@ def test_iopro_nomkl(self, env): def test_mkl(self, env): env.repo_packages = index_packages(1) - assert env.install("mkl") == env.install("mkl 11*", MatchSpec(track_features="mkl")) + assert env.install("mkl") == env.install( + "mkl 11*", MatchSpec(track_features="mkl") + ) def test_accelerate(self, env): env.repo_packages = index_packages(1) @@ -469,8 +482,12 @@ def test_unsat_shortest_chain_4(self, env): helpers.record(name="a", depends=["py =3.7.1"]), helpers.record(name="py_req_1"), helpers.record(name="py_req_2"), - helpers.record(name="py", version="3.7.1", depends=["py_req_1", "py_req_2"]), - helpers.record(name="py", version="3.6.1", depends=["py_req_1", "py_req_2"]), + helpers.record( + name="py", version="3.7.1", depends=["py_req_1", "py_req_2"] + ), + helpers.record( + name="py", version="3.6.1", depends=["py_req_1", "py_req_2"] + ), ] with pytest.raises(UnsatisfiableError) as exc_info: env.install("a", "py=3.6.1") @@ -609,8 +626,14 @@ def test_timestamps_and_deps(self, env): # this is testing that previously installed reqs are not disrupted # by newer timestamps. regression test of sorts for # https://github.com/conda/conda/issues/6271 - assert env.install("mypackage", *env.install("libpng 1.2.*", as_specs=True)) == records_12 - assert env.install("mypackage", *env.install("libpng 1.5.*", as_specs=True)) == records_15 + assert ( + env.install("mypackage", *env.install("libpng 1.2.*", as_specs=True)) + == records_12 + ) + assert ( + env.install("mypackage", *env.install("libpng 1.5.*", as_specs=True)) + == records_15 + ) # unspecified python version should maximize libpng (v1.5), # even though it has a lower timestamp assert env.install("mypackage") == records_15 @@ -778,7 +801,9 @@ def test_unintentional_feature_downgrade(self, env): # will be selected for install instead of a later # build of scipy 0.11.0. 
good_rec_match = MatchSpec("channel-1::scipy==0.11.0=np17py33_3") - good_rec = next(prec for prec in index_packages(1) if good_rec_match.match(prec)) + good_rec = next( + prec for prec in index_packages(1) if good_rec_match.match(prec) + ) bad_deps = tuple(d for d in good_rec.depends if not d.startswith("numpy")) bad_rec = PackageRecord.from_objects( good_rec, diff --git a/conda/trust/constants.py b/conda/trust/constants.py index c7d24e089a1..34833b1ec39 100644 --- a/conda/trust/constants.py +++ b/conda/trust/constants.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - # You could argue that the signatures being here is not necessary; indeed, we # are not necessarily going to be able to check them *properly* (based on some # prior expectations) as the user, since this is the beginning of trust @@ -25,7 +24,9 @@ "signed": { "delegations": { "key_mgr": { - "pubkeys": ["f24c813d23a9b26be665eee5c54680c35321061b337f862385ed6d783b0bedb0"], + "pubkeys": [ + "f24c813d23a9b26be665eee5c54680c35321061b337f862385ed6d783b0bedb0" + ], "threshold": 1, }, "root": { diff --git a/conda/trust/signature_verification.py b/conda/trust/signature_verification.py index d79b1c52cf7..0362ee17381 100644 --- a/conda/trust/signature_verification.py +++ b/conda/trust/signature_verification.py @@ -1,13 +1,12 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import json +import warnings from functools import lru_cache from glob import glob -import json from logging import getLogger from os import makedirs -from os.path import basename, isdir, join, exists -import warnings +from os.path import basename, exists, isdir, join from ..base.context import context from ..common.url import join_url @@ -16,11 +15,11 @@ from .constants import INITIAL_TRUST_ROOT, KEY_MGR_FILE try: - from conda_content_trust.authentication import verify_root, verify_delegation + from conda_content_trust.authentication import verify_delegation, verify_root from conda_content_trust.common import ( + SignatureError, load_metadata_from_file, write_metadata_to_file, - SignatureError, ) from conda_content_trust.signing import wrap_as_signable except ImportError: @@ -65,7 +64,9 @@ def enabled(self): # ensure the trusted_root exists if self.trusted_root is None: - log.warn("could not find trusted_root data for metadata signature verification") + log.warn( + "could not find trusted_root data for metadata signature verification" + ) return False # ensure the key_mgr exists @@ -84,7 +85,9 @@ def trusted_root(self): trusted = INITIAL_TRUST_ROOT # Load current trust root metadata from filesystem - for path in sorted(glob(join(context.av_data_dir, "[0-9]*.root.json")), reverse=True): + for path in sorted( + glob(join(context.av_data_dir, "[0-9]*.root.json")), reverse=True + ): try: int(basename(path).split(".")[0]) except ValueError: @@ -96,7 +99,8 @@ def trusted_root(self): break else: log.debug( - f"No root metadata in {context.av_data_dir}. " "Using built-in root metadata." + f"No root metadata in {context.av_data_dir}. " + "Using built-in root metadata." 
) # Refresh trust root metadata @@ -171,7 +175,9 @@ def key_mgr(self): def session(self): return CondaSession() - def _fetch_channel_signing_data(self, signing_data_url, filename, etag=None, mod_stamp=None): + def _fetch_channel_signing_data( + self, signing_data_url, filename, etag=None, mod_stamp=None + ): if not context.ssl_verify: warnings.simplefilter("ignore", InsecureRequestWarning) @@ -197,7 +203,10 @@ def _fetch_channel_signing_data(self, signing_data_url, filename, etag=None, mod headers=headers, proxies=self.session.proxies, auth=lambda r: r, - timeout=(context.remote_connect_timeout_secs, context.remote_read_timeout_secs), + timeout=( + context.remote_connect_timeout_secs, + context.remote_read_timeout_secs, + ), ) resp.raise_for_status() @@ -214,7 +223,9 @@ def _fetch_channel_signing_data(self, signing_data_url, filename, etag=None, mod return resp.json() except json.decoder.JSONDecodeError as err: # noqa # TODO: additional loading and error handling improvements? - raise ValueError(f"Invalid JSON returned from {signing_data_url}/{filename}") + raise ValueError( + f"Invalid JSON returned from {signing_data_url}/{filename}" + ) def __call__(self, info, fn, signatures): if not self.enabled or fn not in signatures: diff --git a/conda/utils.py b/conda/utils.py index d97461dd6ed..188d3cc7a83 100644 --- a/conda/utils.py +++ b/conda/utils.py @@ -2,26 +2,26 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from contextlib import contextmanager -from functools import lru_cache, wraps import logging -from os.path import abspath, join, isfile, basename, dirname -from os import environ, PathLike -from pathlib import Path import re import sys +from contextlib import contextmanager +from functools import lru_cache, wraps +from os import PathLike, environ +from os.path import abspath, basename, dirname, isfile, join +from pathlib import Path from . 
import CondaError -from .auxlib.compat import shlex_split_unicode, Utf8NamedTemporaryFile -from .common.compat import on_win, isiterable -from .common.path import win_path_to_unix, which +from .auxlib.compat import Utf8NamedTemporaryFile, shlex_split_unicode +from .common.compat import isiterable, on_win +from .common.path import which, win_path_to_unix from .common.url import path_to_url from .deprecations import deprecated from .gateways.disk.read import compute_sum - log = logging.getLogger(__name__) + def path_identity(path): """Used as a dummy path converter where no conversion necessary""" return path @@ -40,13 +40,14 @@ def unix_path_to_win(path, root_prefix=""): def _translation(found_path): group = found_path.group(0) return "{}:{}".format( - group[len(root_prefix) + 1], group[len(root_prefix) + 2:].replace("/", "\\") + group[len(root_prefix) + 1], + group[len(root_prefix) + 2 :].replace("/", "\\"), ) translation = re.sub(path_re, _translation, path) - translation = re.sub(":([a-zA-Z]):\\\\", - lambda match: ";" + match.group(0)[1] + ":\\", - translation) + translation = re.sub( + ":([a-zA-Z]):\\\\", lambda match: ";" + match.group(0)[1] + ":\\", translation + ) return translation @@ -78,15 +79,15 @@ def human_bytes(n): '93.13 GB' """ if n < 1024: - return '%d B' % n - k = n/1024 + return "%d B" % n + k = n / 1024 if k < 1024: - return '%d KB' % round(k) - m = k/1024 + return "%d KB" % round(k) + m = k / 1024 if m < 1024: - return '%.1f MB' % m - g = m/1024 - return '%.2f GB' % g + return "%.1f MB" % m + g = m / 1024 + return "%.2f GB" % g # TODO: this should be done in a more extensible way @@ -95,33 +96,33 @@ def human_bytes(n): # defaults for unix shells. Note: missing "exe" entry, which should be set to # either an executable on PATH, or a full path to an executable for a shell unix_shell_base = dict( - binpath="/bin/", # mind the trailing slash. - echo="echo", - env_script_suffix=".sh", - nul='2>/dev/null', - path_from=path_identity, - path_to=path_identity, - pathsep=":", - printdefaultenv='echo $CONDA_DEFAULT_ENV', - printpath="echo $PATH", - printps1='echo $CONDA_PROMPT_MODIFIER', - promptvar='PS1', - sep="/", - set_var='export ', - shell_args=["-l", "-c"], - shell_suffix="", - slash_convert=("\\", "/"), - source_setup="source", - test_echo_extra="", - var_format="${}", + binpath="/bin/", # mind the trailing slash. + echo="echo", + env_script_suffix=".sh", + nul="2>/dev/null", + path_from=path_identity, + path_to=path_identity, + pathsep=":", + printdefaultenv="echo $CONDA_DEFAULT_ENV", + printpath="echo $PATH", + printps1="echo $CONDA_PROMPT_MODIFIER", + promptvar="PS1", + sep="/", + set_var="export ", + shell_args=["-l", "-c"], + shell_suffix="", + slash_convert=("\\", "/"), + source_setup="source", + test_echo_extra="", + var_format="${}", ) msys2_shell_base = dict( - unix_shell_base, - path_from=unix_path_to_win, - path_to=win_path_to_unix, - binpath="/bin/", # mind the trailing slash. - printpath="python -c \"import os; print(';'.join(os.environ['PATH'].split(';')[1:]))\" | cygpath --path -f -", # NOQA + unix_shell_base, + path_from=unix_path_to_win, + path_to=win_path_to_unix, + binpath="/bin/", # mind the trailing slash. + printpath="python -c \"import os; print(';'.join(os.environ['PATH'].split(';')[1:]))\" | cygpath --path -f -", # NOQA ) if on_win: @@ -150,16 +151,16 @@ def human_bytes(n): binpath="\\Scripts\\", # mind the trailing slash. 
source_setup="call", test_echo_extra="", - nul='1>NUL 2>&1', - set_var='set ', + nul="1>NUL 2>&1", + set_var="set ", shell_suffix=".bat", env_script_suffix=".bat", printps1="@echo %PROMPT%", promptvar="PROMPT", # parens mismatched intentionally. See http://stackoverflow.com/questions/20691060/how-do-i-echo-a-blank-empty-line-to-the-console-from-a-windows-batch-file # NOQA printdefaultenv='IF NOT "%CONDA_DEFAULT_ENV%" == "" (\n' - 'echo %CONDA_DEFAULT_ENV% ) ELSE (\n' - 'echo()', + "echo %CONDA_DEFAULT_ENV% ) ELSE (\n" + "echo()", printpath="@echo %PATH%", exe="cmd.exe", shell_args=["/d", "/c"], @@ -174,42 +175,51 @@ def human_bytes(n): exe="bash.exe", binpath="/Scripts/", # mind the trailing slash. path_from=cygwin_path_to_win, - path_to=win_path_to_cygwin + path_to=win_path_to_cygwin, ), # bash is whichever bash is on PATH. If using Cygwin, you should use the cygwin # entry instead. The only major difference is that it handle's cygwin's /cygdrive # filesystem root. "bash.exe": dict( - msys2_shell_base, exe="bash.exe", + msys2_shell_base, + exe="bash.exe", ), "bash": dict( - msys2_shell_base, exe="bash", + msys2_shell_base, + exe="bash", ), "sh.exe": dict( - msys2_shell_base, exe="sh.exe", + msys2_shell_base, + exe="sh.exe", ), "zsh.exe": dict( - msys2_shell_base, exe="zsh.exe", + msys2_shell_base, + exe="zsh.exe", ), "zsh": dict( - msys2_shell_base, exe="zsh", + msys2_shell_base, + exe="zsh", ), } else: shells = { "bash": dict( - unix_shell_base, exe="bash", + unix_shell_base, + exe="bash", ), "dash": dict( - unix_shell_base, exe="dash", + unix_shell_base, + exe="dash", source_setup=".", ), "zsh": dict( - unix_shell_base, exe="zsh", + unix_shell_base, + exe="zsh", ), "fish": dict( - unix_shell_base, exe="fish", + unix_shell_base, + exe="fish", pathsep=" ", ), } @@ -231,7 +241,9 @@ def md5_file(path: str | PathLike) -> str: return compute_sum(path, "md5") -@deprecated("23.9", "24.3", addendum="Use `conda.gateways.disk.read.compute_sum` instead.") +@deprecated( + "23.9", "24.3", addendum="Use `conda.gateways.disk.read.compute_sum` instead." +) def hashsum_file(path: str | PathLike, mode: Literal["md5", "sha256"] = "md5") -> str: return compute_sum(path, mode) @@ -299,6 +311,7 @@ def quote(s): return f'"{s}"' return " ".join(quote(arg) for arg in args) + else: try: from shlex import join as _args_join @@ -314,8 +327,7 @@ def _args_join(args): # Ensures arguments are a tuple or a list. Strings are converted # by shlex_split_unicode() which is bad; we warn about it or else # we assert (and fix the code). -def massage_arguments(arguments, errors='assert'): - +def massage_arguments(arguments, errors="assert"): # For reference and in-case anything breaks .. # .. 
one of the places (run_command in conda_env/utils.py) this # gets called from used to do this too: @@ -327,49 +339,57 @@ def massage_arguments(arguments, errors='assert'): # arguments = list(map(escape_for_winpath, arguments)) if isinstance(arguments, str): - if errors == 'assert': + if errors == "assert": # This should be something like 'conda programming bug', it is an assert - assert False, 'Please ensure arguments are not strings' + assert False, "Please ensure arguments are not strings" else: arguments = shlex_split_unicode(arguments) - log.warning("Please ensure arguments is not a string; " - "used `shlex_split_unicode()` on it") + log.warning( + "Please ensure arguments is not a string; " + "used `shlex_split_unicode()` on it" + ) if not isiterable(arguments): arguments = (arguments,) - assert not any([isiterable(arg) for arg in arguments]), "Individual arguments must not be iterable" # NOQA + assert not any( + [isiterable(arg) for arg in arguments] + ), "Individual arguments must not be iterable" # NOQA arguments = list(arguments) return arguments def wrap_subprocess_call( - root_prefix, - prefix, - dev_mode, - debug_wrapper_scripts, - arguments, - use_system_tmp_path=False): + root_prefix, + prefix, + dev_mode, + debug_wrapper_scripts, + arguments, + use_system_tmp_path=False, +): arguments = massage_arguments(arguments) if not use_system_tmp_path: - tmp_prefix = abspath(join(prefix, '.tmp')) + tmp_prefix = abspath(join(prefix, ".tmp")) else: tmp_prefix = None script_caller = None multiline = False - if len(arguments) == 1 and '\n' in arguments[0]: + if len(arguments) == 1 and "\n" in arguments[0]: multiline = True if on_win: comspec = get_comspec() # fail early with KeyError if undefined if dev_mode: from conda import CONDA_PACKAGE_ROOT - conda_bat = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda.bat') + + conda_bat = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda.bat") else: - conda_bat = environ.get("CONDA_BAT", - abspath(join(root_prefix, 'condabin', 'conda.bat'))) - with Utf8NamedTemporaryFile(mode='w', prefix=tmp_prefix, - suffix='.bat', delete=False) as fh: + conda_bat = environ.get( + "CONDA_BAT", abspath(join(root_prefix, "condabin", "conda.bat")) + ) + with Utf8NamedTemporaryFile( + mode="w", prefix=tmp_prefix, suffix=".bat", delete=False + ) as fh: silencer = "" if debug_wrapper_scripts else "@" fh.write(f"{silencer}ECHO OFF\n") fh.write(f"{silencer}SET PYTHONIOENCODING=utf-8\n") @@ -390,8 +410,8 @@ def wrap_subprocess_call( fh.write(f"{silencer}SET _CE_M=-m\n") fh.write(f"{silencer}SET _CE_CONDA=conda\n") if debug_wrapper_scripts: - fh.write('echo *** environment before *** 1>&2\n') - fh.write('SET 1>&2\n') + fh.write("echo *** environment before *** 1>&2\n") + fh.write("SET 1>&2\n") # Not sure there is any point in backing this up, nothing will get called with it reset # after all! # fh.write("@FOR /F \"tokens=100\" %%F IN ('chcp') DO @SET CONDA_OLD_CHCP=%%F\n") @@ -399,8 +419,8 @@ def wrap_subprocess_call( fh.write(f'{silencer}CALL "{conda_bat}" activate "{prefix}"\n') fh.write(f"{silencer}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n") if debug_wrapper_scripts: - fh.write('echo *** environment after *** 1>&2\n') - fh.write('SET 1>&2\n') + fh.write("echo *** environment after *** 1>&2\n") + fh.write("SET 1>&2\n") if multiline: # No point silencing the first line. If that's what's wanted then # it needs doing for each line and the caller may as well do that. 
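Between these utils.py hunks, the `massage_arguments` contract shown above deserves a usage sketch: a pre-split list passes through unchanged, while a bare string either trips the default assertion or, with any `errors` value other than "assert", is shlex-split with a logged warning. A minimal illustration, assuming a conda checkout is importable:

    from conda.utils import massage_arguments

    # a pre-split argument list is returned as-is (as a list)
    assert massage_arguments(["run", "-n", "base", "python", "-V"]) == [
        "run", "-n", "base", "python", "-V"
    ]

    # a bare string is a caller bug; outside assert mode it is split and logged
    assert massage_arguments("run python -V", errors="warn") == ["run", "python", "-V"]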
@@ -418,9 +438,9 @@ def wrap_subprocess_call( fh.write(f"{silencer}IF %ERRORLEVEL% NEQ 0 EXIT /b %ERRORLEVEL%\n") fh.write(f"{silencer}chcp %_CONDA_OLD_CHCP%>NUL\n") script_caller = fh.name - command_args = [comspec, '/d', '/c', script_caller] + command_args = [comspec, "/d", "/c", script_caller] else: - shell_path = which('bash') or which('sh') + shell_path = which("bash") or which("sh") if shell_path is None: raise Exception("No compatible shell found!") @@ -428,14 +448,16 @@ def wrap_subprocess_call( # and have it run tests against the very latest development sources. For that to # work we need extra smarts here, we want it to be instead: if dev_mode: - conda_exe = [abspath(join(root_prefix, 'bin', 'python')), '-m', 'conda'] - dev_arg = '--dev' + conda_exe = [abspath(join(root_prefix, "bin", "python")), "-m", "conda"] + dev_arg = "--dev" dev_args = [dev_arg] else: - conda_exe = [environ.get("CONDA_EXE", abspath(join(root_prefix, 'bin', 'conda')))] - dev_arg = '' + conda_exe = [ + environ.get("CONDA_EXE", abspath(join(root_prefix, "bin", "conda"))) + ] + dev_arg = "" dev_args = [] - with Utf8NamedTemporaryFile(mode='w', prefix=tmp_prefix, delete=False) as fh: + with Utf8NamedTemporaryFile(mode="w", prefix=tmp_prefix, delete=False) as fh: if dev_mode: from . import CONDA_SOURCE_ROOT @@ -475,7 +497,8 @@ def get_comspec(): if basename(environ.get("COMSPEC", "")).lower() != "cmd.exe": for comspec in ( # %SystemRoot%\System32\cmd.exe - environ.get("SystemRoot") and join(environ["SystemRoot"], "System32", "cmd.exe"), + environ.get("SystemRoot") + and join(environ["SystemRoot"], "System32", "cmd.exe"), # %windir%\System32\cmd.exe environ.get("windir") and join(environ["windir"], "System32", "cmd.exe"), ): @@ -483,7 +506,9 @@ def get_comspec(): environ["COMSPEC"] = comspec break else: - log.warn("cmd.exe could not be found. Looked in SystemRoot and windir env vars.\n") + log.warn( + "cmd.exe could not be found. 
Looked in SystemRoot and windir env vars.\n" + ) # fails with KeyError if still undefined return environ["COMSPEC"] diff --git a/conda_env/__init__.py b/conda_env/__init__.py index 470cfefa9ef..cdc4a6f5f1b 100644 --- a/conda_env/__init__.py +++ b/conda_env/__init__.py @@ -1,4 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause from conda import __version__ + __version__ = __version__ diff --git a/conda_env/cli/common.py b/conda_env/cli/common.py index 75363eb6639..106a5974148 100644 --- a/conda_env/cli/common.py +++ b/conda_env/cli/common.py @@ -1,15 +1,16 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os -from os.path import isdir, join, abspath, expanduser, expandvars +from os.path import abspath, expanduser, expandvars, isdir, join -from conda.deprecations import deprecated from conda.base.context import context, determine_target_prefix from conda.cli import install as cli_install -from conda.cli.common import stdout_json as _stdout_json, stdout_json_success +from conda.cli.common import stdout_json as _stdout_json +from conda.cli.common import stdout_json_success +from conda.deprecations import deprecated from conda.gateways.connection.session import CONDA_SESSION_SCHEMES -base_env_name = 'base' +base_env_name = "base" @deprecated("23.3", "23.9", addendum="Use `conda.cli.common.stdout_json` instead.") @@ -17,7 +18,9 @@ def stdout_json(d): _stdout_json(d) -@deprecated("23.3", "23.9", addendum="Use `conda.base.context.determine_target_prefix` instead.") +@deprecated( + "23.3", "23.9", addendum="Use `conda.base.context.determine_target_prefix` instead." +) def get_prefix(args, search=True): return determine_target_prefix(context, args) @@ -27,7 +30,9 @@ def find_prefix_name(name): if name == base_env_name: return context.root_prefix # always search cwd in addition to envs dirs (for relative path access) - for envs_dir in list(context.envs_dirs) + [os.getcwd(), ]: + for envs_dir in list(context.envs_dirs) + [ + os.getcwd(), + ]: prefix = join(envs_dir, name) if isdir(prefix): return prefix diff --git a/conda_env/cli/main.py b/conda_env/cli/main.py index f51d0915819..89b9b7f2480 100644 --- a/conda_env/cli/main.py +++ b/conda_env/cli/main.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import os import sys @@ -17,8 +16,9 @@ try: from conda.exceptions import conda_exception_handler except ImportError as e: - if 'CONDA_DEFAULT_ENV' in os.environ: - sys.stderr.write(""" + if "CONDA_DEFAULT_ENV" in os.environ: + sys.stderr.write( + """ There was an error importing conda. It appears this was caused by installing conda-env into a conda @@ -34,17 +34,13 @@ environment, please open a bug report at: https://github.com/conda/conda-env -""".lstrip()) +""".lstrip() + ) sys.exit(-1) else: raise e -from . import main_create -from . import main_export -from . import main_list -from . import main_remove -from . import main_update -from . import main_config +from . import main_config, main_create, main_export, main_list, main_remove, main_update # TODO: This belongs in a helper library somewhere @@ -52,7 +48,7 @@ # merged into conda-env, this needs to be adjusted. 
def show_help_on_empty_command(): if len(sys.argv) == 1: # sys.argv == ['/path/to/bin/conda-env'] - sys.argv.append('--help') + sys.argv.append("--help") def create_parser(): @@ -71,10 +67,11 @@ def create_parser(): def do_call(args, parser): - relative_mod, func_name = args.func.rsplit('.', 1) + relative_mod, func_name = args.func.rsplit(".", 1) # func_name should always be 'execute' from importlib import import_module - module = import_module(relative_mod, __name__.rsplit('.', 1)[0]) + + module = import_module(relative_mod, __name__.rsplit(".", 1)[0]) exit_code = getattr(module, func_name)(args, parser) return exit_code @@ -89,5 +86,5 @@ def main(): return conda_exception_handler(do_call, args, parser) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/conda_env/cli/main_config.py b/conda_env/cli/main_config.py index fea484bcdab..1eaf1a02442 100644 --- a/conda_env/cli/main_config.py +++ b/conda_env/cli/main_config.py @@ -2,31 +2,31 @@ # SPDX-License-Identifier: BSD-3-Clause from argparse import RawDescriptionHelpFormatter - from .main_vars import configure_parser as configure_vars_parser -config_description = ''' +config_description = """ Configure a conda environment -''' +""" -config_example = ''' +config_example = """ examples: conda env config vars list conda env config --append channels conda-forge -''' +""" + def configure_parser(sub_parsers): config_parser = sub_parsers.add_parser( - 'config', + "config", formatter_class=RawDescriptionHelpFormatter, description=config_description, help=config_description, epilog=config_example, ) - config_parser.set_defaults(func='.main_config.execute') + config_parser.set_defaults(func=".main_config.execute") config_subparser = config_parser.add_subparsers() configure_vars_parser(config_subparser) def execute(args, parser): - parser.parse_args(['config', '--help']) + parser.parse_args(["config", "--help"]) diff --git a/conda_env/cli/main_create.py b/conda_env/cli/main_create.py index 69117cd2f06..a213c18f82f 100644 --- a/conda_env/cli/main_create.py +++ b/conda_env/cli/main_create.py @@ -1,23 +1,28 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from argparse import RawDescriptionHelpFormatter import json import os import sys import textwrap +from argparse import RawDescriptionHelpFormatter from conda.base.context import context, determine_target_prefix from conda.cli import install as cli_install -from conda.cli.conda_argparse import add_parser_default_packages, add_parser_json, \ - add_parser_prefix, add_parser_networking, add_parser_solver +from conda.cli.conda_argparse import ( + add_parser_default_packages, + add_parser_json, + add_parser_networking, + add_parser_prefix, + add_parser_solver, +) from conda.core.prefix_data import PrefixData from conda.gateways.disk.delete import rm_rf -from conda.notices import notices from conda.misc import touch_nonadmin -from .common import print_result, get_filename +from conda.notices import notices + from .. import specs from ..installers.base import InvalidInstaller, get_installer +from .common import get_filename, print_result description = """ Create an environment based on an environment definition file. 
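The hunk that follows is a good miniature of the two black rules applied throughout this PR: quotes are normalized to double quotes, and a call that no longer fits on one line is exploded one argument per line with a trailing "magic" comma, which tells black to keep it exploded on every future run. Schematically (illustrative fragment only):

    # before: hand-wrapped, single quotes, no trailing comma
    p.add_argument('-f', '--file',
                   action='store',
                   default='environment.yml')

    # after: black's stable form; the final comma keeps it one-per-line
    p.add_argument(
        "-f",
        "--file",
        action="store",
        default="environment.yml",
    )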
@@ -46,17 +51,18 @@ def configure_parser(sub_parsers): p = sub_parsers.add_parser( - 'create', + "create", formatter_class=RawDescriptionHelpFormatter, description=description, help=description, epilog=example, ) p.add_argument( - '-f', '--file', - action='store', - help='Environment definition file (default: environment.yml)', - default='environment.yml', + "-f", + "--file", + action="store", + help="Environment definition file (default: environment.yml)", + default="environment.yml", ) # Add name and prefix args @@ -66,30 +72,33 @@ def configure_parser(sub_parsers): add_parser_networking(p) p.add_argument( - 'remote_definition', - help='Remote environment definition / IPython notebook', - action='store', + "remote_definition", + help="Remote environment definition / IPython notebook", + action="store", default=None, - nargs='?' + nargs="?", ) p.add_argument( - '--force', - help=('Force creation of environment (removing a previously-existing ' - 'environment of the same name).'), - action='store_true', + "--force", + help=( + "Force creation of environment (removing a previously-existing " + "environment of the same name)." + ), + action="store_true", default=False, ) p.add_argument( - '-d', '--dry-run', - help='Only display what can be done with the current command, arguments, ' - 'and other flags. Remove this flag to actually run the command.', - action='store_true', - default=False + "-d", + "--dry-run", + help="Only display what can be done with the current command, arguments, " + "and other flags. Remove this flag to actually run the command.", + action="store_true", + default=False, ) add_parser_default_packages(p) add_parser_json(p) add_parser_solver(p) - p.set_defaults(func='.main_create.execute') + p.set_defaults(func=".main_create.execute") @notices @@ -119,10 +128,12 @@ def execute(args, parser): result = {"conda": None, "pip": None} - args_packages = context.create_default_packages if not args.no_default_packages else [] + args_packages = ( + context.create_default_packages if not args.no_default_packages else [] + ) if args.dry_run: - installer_type = 'conda' + installer_type = "conda" installer = get_installer(installer_type) pkg_specs = env.dependencies.get(installer_type, []) @@ -132,7 +143,7 @@ def execute(args, parser): if args.json: print(json.dumps(solved_env.to_dict(), indent=2)) else: - print(solved_env.to_yaml(), end='') + print(solved_env.to_yaml(), end="") else: if args_packages: @@ -149,16 +160,23 @@ def execute(args, parser): for installer_type, pkg_specs in env.dependencies.items(): try: installer = get_installer(installer_type) - result[installer_type] = installer.install(prefix, pkg_specs, args, env) + result[installer_type] = installer.install( + prefix, pkg_specs, args, env + ) except InvalidInstaller: - sys.stderr.write(textwrap.dedent(""" + sys.stderr.write( + textwrap.dedent( + """ Unable to install package for {0}. Please double check and ensure your dependencies file has the correct spelling. You might also try installing the conda-env-{0} package to see if provides the required installer. 
- """).lstrip().format(installer_type) + """ + ) + .lstrip() + .format(installer_type) ) return -1 diff --git a/conda_env/cli/main_export.py b/conda_env/cli/main_export.py index 6131b27b624..29441cb0024 100644 --- a/conda_env/cli/main_export.py +++ b/conda_env/cli/main_export.py @@ -1,11 +1,10 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from argparse import RawDescriptionHelpFormatter from conda.base.context import context, determine_target_prefix, env_name -from conda.cli.conda_argparse import add_parser_json, add_parser_prefix from conda.cli.common import stdout_json +from conda.cli.conda_argparse import add_parser_json, add_parser_prefix from ..env import from_environment @@ -22,7 +21,7 @@ def configure_parser(sub_parsers): p = sub_parsers.add_parser( - 'export', + "export", formatter_class=RawDescriptionHelpFormatter, description=description, help=description, @@ -30,9 +29,10 @@ def configure_parser(sub_parsers): ) p.add_argument( - '-c', '--channel', - action='append', - help='Additional channel to include in the export' + "-c", + "--channel", + action="append", + help="Additional channel to include in the export", ) p.add_argument( @@ -43,7 +43,8 @@ def configure_parser(sub_parsers): add_parser_prefix(p) p.add_argument( - '-f', '--file', + "-f", + "--file", default=None, required=False, help=( @@ -54,29 +55,30 @@ def configure_parser(sub_parsers): ) p.add_argument( - '--no-builds', + "--no-builds", default=False, - action='store_true', + action="store_true", required=False, - help='Remove build specification from dependencies' + help="Remove build specification from dependencies", ) p.add_argument( - '--ignore-channels', + "--ignore-channels", default=False, - action='store_true', + action="store_true", required=False, - help='Do not include channel names with package names.') + help="Do not include channel names with package names.", + ) add_parser_json(p) p.add_argument( - '--from-history', + "--from-history", default=False, - action='store_true', + action="store_true", required=False, - help='Build environment spec from explicit specs in history' + help="Build environment spec from explicit specs in history", ) - p.set_defaults(func='.main_export.execute') + p.set_defaults(func=".main_export.execute") # TODO Make this aware of channels that were used to install packages @@ -97,8 +99,8 @@ def execute(args, parser): env.add_channels(args.channel) if args.file is None: - stdout_json(env.to_dict()) if args.json else print(env.to_yaml(), end='') + stdout_json(env.to_dict()) if args.json else print(env.to_yaml(), end="") else: - fp = open(args.file, 'wb') + fp = open(args.file, "wb") env.to_dict(stream=fp) if args.json else env.to_yaml(stream=fp) fp.close() diff --git a/conda_env/cli/main_list.py b/conda_env/cli/main_list.py index 10237bd762c..06682f56e2b 100644 --- a/conda_env/cli/main_list.py +++ b/conda_env/cli/main_list.py @@ -19,7 +19,7 @@ def configure_parser(sub_parsers): list_parser = sub_parsers.add_parser( - 'list', + "list", formatter_class=RawDescriptionHelpFormatter, description=description, help=description, @@ -28,12 +28,12 @@ def configure_parser(sub_parsers): add_parser_json(list_parser) - list_parser.set_defaults(func='.main_list.execute') + list_parser.set_defaults(func=".main_list.execute") def execute(args, parser): - info_dict = {'envs': list_all_known_prefixes()} - common.print_envs_list(info_dict['envs'], not args.json) + info_dict = {"envs": list_all_known_prefixes()} + common.print_envs_list(info_dict["envs"], not args.json) 
if args.json: common.stdout_json(info_dict) diff --git a/conda_env/cli/main_remove.py b/conda_env/cli/main_remove.py index af664265ceb..f74be4d862c 100644 --- a/conda_env/cli/main_remove.py +++ b/conda_env/cli/main_remove.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from argparse import Namespace, RawDescriptionHelpFormatter from conda.cli.conda_argparse import ( @@ -10,11 +9,14 @@ ) _help = "Remove an environment" -_description = _help + """ +_description = ( + _help + + """ Removes a provided environment. You must deactivate the existing environment before you can remove it. """.lstrip() +) _example = """ @@ -27,7 +29,7 @@ def configure_parser(sub_parsers): p = sub_parsers.add_parser( - 'remove', + "remove", formatter_class=RawDescriptionHelpFormatter, description=_description, help=_help, @@ -38,18 +40,29 @@ def configure_parser(sub_parsers): add_parser_solver(p) add_output_and_prompt_options(p) - p.set_defaults(func='.main_remove.execute') + p.set_defaults(func=".main_remove.execute") def execute(args, parser): import conda.cli.main_remove + args = vars(args) - args.update({ - 'all': True, 'channel': None, 'features': None, - 'override_channels': None, 'use_local': None, 'use_cache': None, - 'offline': None, 'force': True, 'pinned': None}) + args.update( + { + "all": True, + "channel": None, + "features": None, + "override_channels": None, + "use_local": None, + "use_cache": None, + "offline": None, + "force": True, + "pinned": None, + } + ) args = Namespace(**args) from conda.base.context import context + context.__init__(argparse_args=args) conda.cli.main_remove.execute(args, parser) diff --git a/conda_env/cli/main_update.py b/conda_env/cli/main_update.py index 6225505512c..be39c9b9e84 100644 --- a/conda_env/cli/main_update.py +++ b/conda_env/cli/main_update.py @@ -1,9 +1,9 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from argparse import RawDescriptionHelpFormatter import os import sys import textwrap +from argparse import RawDescriptionHelpFormatter from conda.base.context import context, determine_target_prefix from conda.cli.conda_argparse import ( @@ -16,9 +16,9 @@ from conda.misc import touch_nonadmin from conda.notices import notices -from .common import print_result, get_filename from .. 
import specs as install_specs
 from ..installers.base import InvalidInstaller, get_installer
+from .common import get_filename, print_result
 
 description = """
 Update the current environment based on environment file
@@ -36,7 +36,7 @@
 
 def configure_parser(sub_parsers):
     p = sub_parsers.add_parser(
-        'update',
+        "update",
         formatter_class=RawDescriptionHelpFormatter,
         description=description,
         help=description,
@@ -44,27 +44,28 @@
     )
     add_parser_prefix(p)
     p.add_argument(
-        '-f', '--file',
-        action='store',
-        help='environment definition (default: environment.yml)',
-        default='environment.yml',
+        "-f",
+        "--file",
+        action="store",
+        help="environment definition (default: environment.yml)",
+        default="environment.yml",
     )
     p.add_argument(
-        '--prune',
-        action='store_true',
+        "--prune",
+        action="store_true",
         default=False,
-        help='remove installed packages not defined in environment.yml',
+        help="remove installed packages not defined in environment.yml",
     )
     p.add_argument(
-        'remote_definition',
-        help='remote environment definition / IPython notebook',
-        action='store',
+        "remote_definition",
+        help="remote environment definition / IPython notebook",
+        action="store",
         default=None,
-        nargs='?'
+        nargs="?",
     )
     add_parser_json(p)
     add_parser_solver(p)
-    p.set_defaults(func='.main_update.execute')
+    p.set_defaults(func=".main_update.execute")
 
 
 @notices
@@ -81,14 +82,16 @@ def execute(args, parser):
     if not env.name:
         # Note, this is a hack for get_prefix that assumes argparse results
         # TODO Refactor common.get_prefix
-        name = os.environ.get('CONDA_DEFAULT_ENV', False)
+        name = os.environ.get("CONDA_DEFAULT_ENV", False)
         if not name:
             msg = "Unable to determine environment\n\n"
-            msg += textwrap.dedent("""
+            msg += textwrap.dedent(
+                """
                 Please re-run this command with one of the following options:
 
                 * Provide an environment name via --name or -n
-                * Re-run this command inside an activated conda environment.""").lstrip()
+                * Re-run this command inside an activated conda environment."""
+            ).lstrip()
             # TODO Add json support
             raise CondaEnvException(msg)
@@ -114,14 +117,19 @@ def execute(args, parser):
         try:
             installers[installer_type] = get_installer(installer_type)
         except InvalidInstaller:
-            sys.stderr.write(textwrap.dedent("""
+            sys.stderr.write(
+                textwrap.dedent(
+                    """
                 Unable to install package for {0}.
 
                 Please double check and ensure your dependencies file has
                 the correct spelling. You might also try installing the
                 conda-env-{0} package to see if it provides the required
                 installer.
-                """).lstrip().format(installer_type)
+                    """
+                )
+                .lstrip()
+                .format(installer_type)
             )
             return -1
diff --git a/conda_env/cli/main_vars.py b/conda_env/cli/main_vars.py
index 811446f8c9a..66a1ef2ac11 100644
--- a/conda_env/cli/main_vars.py
+++ b/conda_env/cli/main_vars.py
@@ -5,51 +5,52 @@
 from conda.base.context import context, determine_target_prefix
 from conda.cli import common
-from conda.cli.conda_argparse import add_parser_prefix, add_parser_json
+from conda.cli.conda_argparse import add_parser_json, add_parser_prefix
 from conda.core.prefix_data import PrefixData
 from conda.exceptions import EnvironmentLocationNotFound
 
-var_description = '''
+var_description = """
 Interact with environment variables associated with Conda environments
-'''
+"""
 
-var_example = '''
+var_example = """
 examples:
     conda env config vars list -n my_env
     conda env config vars set MY_VAR=something OTHER_THING=ohhhhya
     conda env config vars unset MY_VAR
-'''
+"""
 
-list_description = '''
+list_description = """
 List environment variables for a conda environment
-'''
+"""
 
-list_example = '''
+list_example = """
 examples:
     conda env config vars list -n my_env
-'''
+"""
 
-set_description = '''
+set_description = """
 Set environment variables for a conda environment
-'''
+"""
 
-set_example = '''
+set_example = """
 example:
     conda env config vars set MY_VAR=weee
-'''
+"""
 
-unset_description = '''
+unset_description = """
 Unset environment variables for a conda environment
-'''
+"""
 
-unset_example = '''
+unset_example = """
 example:
     conda env config vars unset MY_VAR
-'''
+"""
+
 
 def configure_parser(sub_parsers):
     var_parser = sub_parsers.add_parser(
-        'vars',
+        "vars",
         formatter_class=RawDescriptionHelpFormatter,
         description=var_description,
         help=var_description,
@@ -58,7 +59,7 @@
     var_subparser = var_parser.add_subparsers()
 
     list_parser = var_subparser.add_parser(
-        'list',
+        "list",
         formatter_class=RawDescriptionHelpFormatter,
         description=list_description,
         help=list_description,
@@ -66,39 +67,39 @@
     )
     add_parser_prefix(list_parser)
     add_parser_json(list_parser)
-    list_parser.set_defaults(func='.main_vars.execute_list')
+    list_parser.set_defaults(func=".main_vars.execute_list")
 
     set_parser = var_subparser.add_parser(
-        'set',
+        "set",
         formatter_class=RawDescriptionHelpFormatter,
         description=set_description,
         help=set_description,
         epilog=set_example,
     )
     set_parser.add_argument(
-        'vars',
-        action='store',
-        nargs='*',
-        help='Environment variables to set in the form <KEY>=<VALUE> separated by spaces'
+        "vars",
+        action="store",
+        nargs="*",
+        help="Environment variables to set in the form <KEY>=<VALUE> separated by spaces",
     )
     add_parser_prefix(set_parser)
-    set_parser.set_defaults(func='.main_vars.execute_set')
+    set_parser.set_defaults(func=".main_vars.execute_set")
 
     unset_parser = var_subparser.add_parser(
-        'unset',
+        "unset",
         formatter_class=RawDescriptionHelpFormatter,
         description=unset_description,
         help=unset_description,
         epilog=unset_example,
     )
     unset_parser.add_argument(
-        'vars',
-        action='store',
-        nargs='*',
-        help='Environment variables to unset in the form <KEY> separated by spaces'
+        "vars",
+        action="store",
+        nargs="*",
+        help="Environment variables to unset in the form <KEY> separated by spaces",
     )
     add_parser_prefix(unset_parser)
-    unset_parser.set_defaults(func='.main_vars.execute_unset')
+    unset_parser.set_defaults(func=".main_vars.execute_unset")
 
 
 def execute_list(args, parser):
@@ -124,7 +125,7 @@ def execute_set(args, parser):
     env_vars_to_add = {}
     for v in args.vars:
-        var_def = 
v.split('=') + var_def = v.split("=") env_vars_to_add[var_def[0].strip()] = "=".join(var_def[1:]).strip() pd.set_environment_env_vars(env_vars_to_add) if prefix == context.active_prefix: diff --git a/conda_env/env.py b/conda_env/env.py index 353d3d39bca..a24c07021a5 100644 --- a/conda_env/env.py +++ b/conda_env/env.py @@ -1,25 +1,25 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from itertools import chain +import json import os import re -import json +from itertools import chain from conda.base.context import context -from conda.exceptions import EnvironmentFileEmpty, EnvironmentFileNotFound from conda.cli import common # TODO: this should never have to import form conda.cli -from conda.common.serialize import yaml_safe_load, yaml_safe_dump +from conda.common.iterators import groupby_to_dict as groupby +from conda.common.iterators import unique +from conda.common.serialize import yaml_safe_dump, yaml_safe_load from conda.core.prefix_data import PrefixData +from conda.exceptions import EnvironmentFileEmpty, EnvironmentFileNotFound from conda.gateways.connection.download import download_text from conda.gateways.connection.session import CONDA_SESSION_SCHEMES +from conda.history import History from conda.models.enums import PackageType from conda.models.match_spec import MatchSpec from conda.models.prefix_graph import PrefixGraph -from conda.history import History -from conda.common.iterators import groupby_to_dict as groupby, unique - -VALID_KEYS = ('name', 'dependencies', 'prefix', 'channels', 'variables') +VALID_KEYS = ("name", "dependencies", "prefix", "channels", "variables") def validate_keys(data, kwargs): @@ -32,35 +32,39 @@ def validate_keys(data, kwargs): new_data.pop(key) if invalid_keys: - filename = kwargs.get('filename') - verb = 'are' if len(invalid_keys) != 1 else 'is' - plural = 's' if len(invalid_keys) != 1 else '' - print("\nEnvironmentSectionNotValid: The following section{plural} on " - "'{filename}' {verb} invalid and will be ignored:" - "".format(filename=filename, plural=plural, verb=verb)) + filename = kwargs.get("filename") + verb = "are" if len(invalid_keys) != 1 else "is" + plural = "s" if len(invalid_keys) != 1 else "" + print( + "\nEnvironmentSectionNotValid: The following section{plural} on " + "'{filename}' {verb} invalid and will be ignored:" + "".format(filename=filename, plural=plural, verb=verb) + ) for key in invalid_keys: print(f" - {key}") print() - deps = data.get('dependencies', []) + deps = data.get("dependencies", []) depsplit = re.compile(r"[<>~\s=]") - is_pip = lambda dep: 'pip' in depsplit.split(dep)[0].split('::') + is_pip = lambda dep: "pip" in depsplit.split(dep)[0].split("::") lists_pip = any(is_pip(_) for _ in deps if not isinstance(_, dict)) for dep in deps: - if (isinstance(dep, dict) and 'pip' in dep and not lists_pip): - print("Warning: you have pip-installed dependencies in your environment file, " - "but you do not list pip itself as one of your conda dependencies. Conda " - "may not use the correct pip to install your packages, and they may end up " - "in the wrong place. Please add an explicit pip dependency. I'm adding one" - " for you, but still nagging you.") - new_data['dependencies'].insert(0, 'pip') + if isinstance(dep, dict) and "pip" in dep and not lists_pip: + print( + "Warning: you have pip-installed dependencies in your environment file, " + "but you do not list pip itself as one of your conda dependencies. 
Conda " + "may not use the correct pip to install your packages, and they may end up " + "in the wrong place. Please add an explicit pip dependency. I'm adding one" + " for you, but still nagging you." + ) + new_data["dependencies"].insert(0, "pip") break return new_data def load_from_directory(directory): """Load and return an ``Environment`` from a given ``directory``""" - files = ['environment.yml', 'environment.yaml'] + files = ["environment.yml", "environment.yaml"] while True: for f in files: try: @@ -75,7 +79,9 @@ def load_from_directory(directory): # TODO tests!!! -def from_environment(name, prefix, no_builds=False, ignore_channels=False, from_history=False): +def from_environment( + name, prefix, no_builds=False, ignore_channels=False, from_history=False +): """ Get environment object from prefix Args: @@ -94,8 +100,13 @@ def from_environment(name, prefix, no_builds=False, ignore_channels=False, from_ if from_history: history = History(prefix).get_requested_specs_map() deps = [str(package) for package in history.values()] - return Environment(name=name, dependencies=deps, channels=list(context.channels), - prefix=prefix, variables=variables) + return Environment( + name=name, + dependencies=deps, + channels=list(context.channels), + prefix=prefix, + variables=variables, + ) precs = tuple(PrefixGraph(pd.iter_records()).graph) grouped_precs = groupby(lambda x: x.package_type, precs) @@ -119,9 +130,9 @@ def from_environment(name, prefix, no_builds=False, ignore_channels=False, from_ ) if no_builds: - dependencies = ['='.join((a.name, a.version)) for a in conda_precs] + dependencies = ["=".join((a.name, a.version)) for a in conda_precs] else: - dependencies = ['='.join((a.name, a.version, a.build)) for a in conda_precs] + dependencies = ["=".join((a.name, a.version, a.build)) for a in conda_precs] if pip_precs: dependencies.append({"pip": [f"{a.name}=={a.version}" for a in pip_precs]}) @@ -131,8 +142,13 @@ def from_environment(name, prefix, no_builds=False, ignore_channels=False, from_ canonical_name = prec.channel.canonical_name if canonical_name not in channels: channels.insert(0, canonical_name) - return Environment(name=name, dependencies=dependencies, channels=channels, prefix=prefix, - variables=variables) + return Environment( + name=name, + dependencies=dependencies, + channels=channels, + prefix=prefix, + variables=variables, + ) def from_yaml(yamlstr, **kwargs): @@ -152,7 +168,9 @@ def from_yaml(yamlstr, **kwargs): def _expand_channels(data): """Expands environment variables for the channels found in the yaml data""" - data["channels"] = [os.path.expandvars(channel) for channel in data.get("channels", [])] + data["channels"] = [ + os.path.expandvars(channel) for channel in data.get("channels", []) + ] def from_file(filename): @@ -162,12 +180,12 @@ def from_file(filename): elif not os.path.exists(filename): raise EnvironmentFileNotFound(filename) else: - with open(filename, 'rb') as fp: + with open(filename, "rb") as fp: yamlb = fp.read() try: - yamlstr = yamlb.decode('utf-8') + yamlstr = yamlb.decode("utf-8") except UnicodeDecodeError: - yamlstr = yamlb.decode('utf-16') + yamlstr = yamlb.decode("utf-16") return from_yaml(yamlstr, filename=filename) @@ -182,19 +200,19 @@ def parse(self): if not self.raw: return - self.update({'conda': []}) + self.update({"conda": []}) for line in self.raw: if isinstance(line, dict): self.update(line) else: - self['conda'].append(common.arg2spec(line)) + self["conda"].append(common.arg2spec(line)) - if 'pip' in self: - if not self['pip']: - 
del self['pip'] - if not any(MatchSpec(s).name == 'pip' for s in self['conda']): - self['conda'].append('pip') + if "pip" in self: + if not self["pip"]: + del self["pip"] + if not any(MatchSpec(s).name == "pip" for s in self["conda"]): + self["conda"].append("pip") # TODO only append when it's not already present def add(self, package_name): @@ -203,8 +221,15 @@ def add(self, package_name): class Environment: - def __init__(self, name=None, filename=None, channels=None, - dependencies=None, prefix=None, variables=None): + def __init__( + self, + name=None, + filename=None, + channels=None, + dependencies=None, + prefix=None, + variables=None, + ): self.name = name self.filename = filename self.prefix = prefix @@ -224,13 +249,13 @@ def remove_channels(self): def to_dict(self, stream=None): d = {"name": self.name} if self.channels: - d['channels'] = self.channels + d["channels"] = self.channels if self.dependencies: - d['dependencies'] = self.dependencies.raw + d["dependencies"] = self.dependencies.raw if self.variables: - d['variables'] = self.variables + d["variables"] = self.variables if self.prefix: - d['prefix'] = self.prefix + d["prefix"] = self.prefix if stream is None: return d stream.write(json.dumps(d)) diff --git a/conda_env/installers/base.py b/conda_env/installers/base.py index c625d4d084f..3219e2edceb 100644 --- a/conda_env/installers/base.py +++ b/conda_env/installers/base.py @@ -1,7 +1,8 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import importlib -ENTRY_POINT = 'conda_env.installers' + +ENTRY_POINT = "conda_env.installers" class InvalidInstaller(Exception): @@ -12,6 +13,6 @@ def __init__(self, name): def get_installer(name): try: - return importlib.import_module(ENTRY_POINT + '.' + name) + return importlib.import_module(ENTRY_POINT + "." 
+ name) except ImportError: raise InvalidInstaller(name) diff --git a/conda_env/installers/conda.py b/conda_env/installers/conda.py index 4a689054ad3..cb1ba869483 100644 --- a/conda_env/installers/conda.py +++ b/conda_env/installers/conda.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import tempfile from os.path import basename @@ -21,9 +20,9 @@ def _solve(prefix, specs, args, env, *_, **kwargs): # TODO: support all various ways this happens # Including 'nodefaults' in the channels list disables the defaults - channel_urls = [chan for chan in env.channels if chan != 'nodefaults'] + channel_urls = [chan for chan in env.channels if chan != "nodefaults"] - if 'nodefaults' not in env.channels: + if "nodefaults" not in env.channels: channel_urls.extend(context.channels) _channel_priority_map = prioritize_channels(channel_urls) @@ -39,9 +38,7 @@ def dry_run(specs, args, env, *_, **kwargs): solver = _solve(tempfile.mkdtemp(), specs, args, env, *_, **kwargs) pkgs = solver.solve_final_state() solved_env = Environment( - name=env.name, - dependencies=[str(p) for p in pkgs], - channels=env.channels + name=env.name, dependencies=[str(p) for p in pkgs], channels=env.channels ) return solved_env @@ -51,10 +48,13 @@ def install(prefix, specs, args, env, *_, **kwargs): try: unlink_link_transaction = solver.solve_for_transaction( - prune=getattr(args, 'prune', False), update_modifier=UpdateModifier.FREEZE_INSTALLED) + prune=getattr(args, "prune", False), + update_modifier=UpdateModifier.FREEZE_INSTALLED, + ) except (UnsatisfiableError, SystemExit): unlink_link_transaction = solver.solve_for_transaction( - prune=getattr(args, 'prune', False), update_modifier=NULL) + prune=getattr(args, "prune", False), update_modifier=NULL + ) if unlink_link_transaction.nothing_to_do: return None diff --git a/conda_env/installers/pip.py b/conda_env/installers/pip.py index 56967a4b819..ae2a5c74fbf 100644 --- a/conda_env/installers/pip.py +++ b/conda_env/installers/pip.py @@ -1,15 +1,14 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import os import os.path as op -from conda.auxlib.compat import Utf8NamedTemporaryFile -from conda.gateways.connection.session import CONDA_SESSION_SCHEMES -from conda_env.pip_util import pip_subprocess, get_pip_installed_packages -from conda.common.io import Spinner -from conda.base.context import context from logging import getLogger +from conda.auxlib.compat import Utf8NamedTemporaryFile +from conda.base.context import context +from conda.common.io import Spinner +from conda.gateways.connection.session import CONDA_SESSION_SCHEMES +from conda_env.pip_util import get_pip_installed_packages, pip_subprocess log = getLogger(__name__) @@ -39,31 +38,38 @@ def _pip_install_via_requirements(prefix, specs, args, *_, **kwargs): requirements = None try: # Generate the temporary requirements file - requirements = Utf8NamedTemporaryFile(mode='w', - prefix='condaenv.', - suffix='.requirements.txt', - dir=pip_workdir, - delete=False) - requirements.write('\n'.join(specs)) + requirements = Utf8NamedTemporaryFile( + mode="w", + prefix="condaenv.", + suffix=".requirements.txt", + dir=pip_workdir, + delete=False, + ) + requirements.write("\n".join(specs)) requirements.close() # pip command line... 
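# [editor's note] For orientation: _pip_install_via_requirements above writes
# the specs to a throwaway requirements file and then invokes the target
# prefix's own interpreter as `python -m pip install -r <file>`. A minimal
# stand-alone sketch of that idea, assuming only the standard library (the
# helper name and error handling are illustrative, not conda's actual API):
import os
import subprocess
import tempfile


def pip_install_specs(prefix, specs):
    # Write the specs to a temporary requirements file, as the diff does.
    with tempfile.NamedTemporaryFile(
        mode="w", prefix="condaenv.", suffix=".requirements.txt", delete=False
    ) as f:
        f.write("\n".join(specs))
    try:
        # Use the environment's own python so packages land in that prefix.
        python = os.path.join(
            prefix, "python.exe" if os.name == "nt" else os.path.join("bin", "python")
        )
        subprocess.run([python, "-m", "pip", "install", "-r", f.name], check=True)
    finally:
        # Mirror the diff's finally block: remove the temp file even on failure.
        os.remove(f.name)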
# see https://pip.pypa.io/en/stable/cli/pip/#exists-action-option
-        pip_cmd = ['install', '-U', '-r', requirements.name, '--exists-action=b']
+        pip_cmd = ["install", "-U", "-r", requirements.name, "--exists-action=b"]
         stdout, stderr = pip_subprocess(pip_cmd, prefix, cwd=pip_workdir)
     finally:
         # Win/Appveyor does not like it if we use context manager + delete=True.
         # So we delete the temporary file in a finally block.
         if requirements is not None and op.isfile(requirements.name):
-            if 'CONDA_TEST_SAVE_TEMPS' not in os.environ:
+            if "CONDA_TEST_SAVE_TEMPS" not in os.environ:
                 os.remove(requirements.name)
             else:
-                log.warning('CONDA_TEST_SAVE_TEMPS :: retaining pip requirements.txt {}'
-                            .format(requirements.name))
+                log.warning(
+                    "CONDA_TEST_SAVE_TEMPS :: retaining pip requirements.txt {}".format(
+                        requirements.name
+                    )
+                )
     return get_pip_installed_packages(stdout)
 
 
 def install(*args, **kwargs):
-    with Spinner("Installing pip dependencies",
-                 not context.verbosity and not context.quiet,
-                 context.json):
+    with Spinner(
+        "Installing pip dependencies",
+        not context.verbosity and not context.quiet,
+        context.json,
+    ):
         return _pip_install_via_requirements(*args, **kwargs)
diff --git a/conda_env/pip_util.py b/conda_env/pip_util.py
index a385a741c00..ef2adc2465c 100644
--- a/conda_env/pip_util.py
+++ b/conda_env/pip_util.py
@@ -7,26 +7,25 @@
 """
 import json
-from logging import getLogger
 import os
 import re
 import sys
+from logging import getLogger
 
+from conda.base.context import context
 from conda.exceptions import CondaEnvException
-from conda.gateways.subprocess import any_subprocess
 from conda.exports import on_win
-from conda.base.context import context
-
+from conda.gateways.subprocess import any_subprocess
 
 log = getLogger(__name__)
 
 
 def pip_subprocess(args, prefix, cwd):
     if on_win:
-        python_path = os.path.join(prefix, 'python.exe')
+        python_path = os.path.join(prefix, "python.exe")
     else:
-        python_path = os.path.join(prefix, 'bin', 'python')
-    run_args = [python_path, '-m', 'pip'] + args
+        python_path = os.path.join(prefix, "bin", "python")
+    run_args = [python_path, "-m", "pip"] + args
     stdout, stderr, rc = any_subprocess(run_args, prefix, cwd=cwd)
     if not context.quiet and not context.json:
         print("Ran pip subprocess with arguments:")
@@ -53,7 +52,7 @@
 
 def get_pip_version(prefix):
-    stdout, stderr = pip_subprocess(['-V'], prefix)
+    stdout, stderr = pip_subprocess(["-V"], prefix)
     pip_version = re.search(r"pip\ (\d+\.\d+\.\d+)", stdout)
     if not pip_version:
         raise CondaEnvException("Failed to find pip version string in output")
@@ -65,19 +64,21 @@
 
 class PipPackage(dict):
     def __str__(self):
         if "path" in self:
-            return "{} ({})-{}-<pip>".format(self["name"], self["path"], self["version"])
+            return "{} ({})-{}-<pip>".format(
+                self["name"], self["path"], self["version"]
+            )
         return "{}-{}-<pip>".format(self["name"], self["version"])
 
 
 def installed(prefix, output=True):
     pip_version = get_pip_version(prefix)
-    pip_major_version = int(pip_version.split('.', 1)[0])
+    pip_major_version = int(pip_version.split(".", 1)[0])
 
     env = os.environ.copy()
-    args = ['list']
+    args = ["list"]
     if pip_major_version >= 9:
-        args += ['--format', 'json']
+        args += ["--format", "json"]
     else:
         env["PIP_FORMAT"] = "legacy"
@@ -96,25 +97,25 @@
         # in installed append a fake name to installed with 'pip'
         # as the build string
         for kwargs in pkgs:
-            kwargs['name'] = 
kwargs["name"].lower() + if ", " in kwargs["version"]: # Packages installed with setup.py develop will include a path in # the version. They should be included here, even if they are # installed with conda, as they are preferred over the conda # version. We still include the conda version, though, because it # is still installed. - version, path = kwargs['version'].split(', ', 1) + version, path = kwargs["version"].split(", ", 1) # We do this because the code below uses rsplit('-', 2) - version = version.replace('-', ' ') - kwargs['version'] = version - kwargs['path'] = path + version = version.replace("-", " ") + kwargs["version"] = version + kwargs["path"] = path yield PipPackage(**kwargs) else: # For every package in pipinst that is not already represented # in installed append a fake name to installed with 'pip' # as the build string - pat = re.compile(r'([\w.-]+)\s+\((.+)\)') + pat = re.compile(r"([\w.-]+)\s+\((.+)\)") for line in pip_stdout.splitlines(): line = line.strip() if not line: @@ -122,28 +123,33 @@ def installed(prefix, output=True): m = pat.match(line) if m is None: if output: - print('Could not extract name and version from: %r' % line, file=sys.stderr) + print( + "Could not extract name and version from: %r" % line, + file=sys.stderr, + ) continue name, version = m.groups() name = name.lower() kwargs = { - 'name': name, - 'version': version, + "name": name, + "version": version, } - if ', ' in version: + if ", " in version: # Packages installed with setup.py develop will include a path in # the version. They should be included here, even if they are # installed with conda, as they are preferred over the conda # version. We still include the conda version, though, because it # is still installed. - version, path = version.split(', ') + version, path = version.split(", ") # We do this because the code below uses rsplit('-', 2) - version = version.replace('-', ' ') - kwargs.update({ - 'path': path, - 'version': version, - }) + version = version.replace("-", " ") + kwargs.update( + { + "path": path, + "version": version, + } + ) yield PipPackage(**kwargs) @@ -170,7 +176,7 @@ def add_pip_installed(prefix, installed_pkgs, json=None, output=True): # because pip normalizes `foo_bar` to `foo-bar` conda_names = {_canonicalize_name(rec.name) for rec in installed_pkgs} for pip_pkg in installed(prefix, output=output): - pip_name = _canonicalize_name(pip_pkg['name']) - if pip_name in conda_names and 'path' not in pip_pkg: + pip_name = _canonicalize_name(pip_pkg["name"]) + if pip_name in conda_names and "path" not in pip_pkg: continue installed_pkgs.add(str(pip_pkg)) diff --git a/conda_env/specs/__init__.py b/conda_env/specs/__init__.py index 0436164787c..f4372915370 100644 --- a/conda_env/specs/__init__.py +++ b/conda_env/specs/__init__.py @@ -3,7 +3,7 @@ from __future__ import annotations import os -from typing import Union, Type +from typing import Type, Union from conda.exceptions import ( EnvironmentFileExtensionNotValid, @@ -16,7 +16,6 @@ from .requirements import RequirementsSpec from .yaml_file import YamlFileSpec - FileSpecTypes = Union[Type[YamlFileSpec], Type[RequirementsSpec]] @@ -31,7 +30,9 @@ def get_spec_class_from_file(filename: str) -> FileSpecTypes: _, ext = os.path.splitext(filename) # First check if file exists and test the known valid extension for specs - file_exists = os.path.isfile(filename) or filename.split("://", 1)[0] in CONDA_SESSION_SCHEMES + file_exists = ( + os.path.isfile(filename) or filename.split("://", 1)[0] in CONDA_SESSION_SCHEMES + ) if 
file_exists: if ext == "" or ext not in all_valid_exts: raise EnvironmentFileExtensionNotValid(filename) @@ -47,7 +48,10 @@ def get_spec_class_from_file(filename: str) -> FileSpecTypes: def detect( - name: str = None, filename: str = None, directory: str = None, remote_definition: str = None + name: str = None, + filename: str = None, + directory: str = None, + remote_definition: str = None, ) -> SpecTypes: """ Return the appropriate spec type to use. diff --git a/conda_env/specs/binstar.py b/conda_env/specs/binstar.py index 2bfea47fc60..e7a76b7acab 100644 --- a/conda_env/specs/binstar.py +++ b/conda_env/specs/binstar.py @@ -2,17 +2,16 @@ # SPDX-License-Identifier: BSD-3-Clause from __future__ import annotations -from functools import cached_property import re +from functools import cached_property from types import ModuleType from conda.exceptions import EnvironmentFileNotDownloaded from conda.models.version import normalized_version -from ..env import from_yaml, Environment - +from ..env import Environment, from_yaml -ENVIRONMENT_TYPE = 'env' +ENVIRONMENT_TYPE = "env" class BinstarSpec: @@ -23,6 +22,7 @@ class BinstarSpec: spec.msg # => Error messages :raises: EnvironmentFileNotDownloaded """ + msg = None def __init__(self, name=None): @@ -36,8 +36,10 @@ def can_handle(self) -> bool: # TODO: log information about trying to find the package in binstar.org if self.valid_name(): if not self.binstar: - self.msg = ("Anaconda Client is required to interact with anaconda.org or an " - "Anaconda API. Please run `conda install anaconda-client -n base`.") + self.msg = ( + "Anaconda Client is required to interact with anaconda.org or an " + "Anaconda API. Please run `conda install anaconda-client -n base`." + ) return False return self.package is not None and self.valid_package() @@ -74,7 +76,9 @@ def binstar(self) -> ModuleType: @cached_property def file_data(self) -> list[dict[str, str]]: - return [data for data in self.package["files"] if data["type"] == ENVIRONMENT_TYPE] + return [ + data for data in self.package["files"] if data["type"] == ENVIRONMENT_TYPE + ] @cached_property def environment(self) -> Environment: @@ -83,7 +87,9 @@ def environment(self) -> Environment: for d in self.file_data ] latest_version = max(versions, key=lambda x: x["normalized"])["original"] - file_data = [data for data in self.package["files"] if data["version"] == latest_version] + file_data = [ + data for data in self.package["files"] if data["version"] == latest_version + ] req = self.binstar.download( self.username, self.packagename, latest_version, file_data[0]["basename"] ) diff --git a/conda_env/specs/requirements.py b/conda_env/specs/requirements.py index 0de67d442f6..a0e699b12dc 100644 --- a/conda_env/specs/requirements.py +++ b/conda_env/specs/requirements.py @@ -6,10 +6,11 @@ class RequirementsSpec: - ''' + """ Reads dependencies from a requirements.txt file and returns an Environment object from it. 
- ''' + """ + msg = None extensions = {".txt"} @@ -41,10 +42,7 @@ def environment(self): with open(self.filename) as reqfile: for line in reqfile: line = line.strip() - if not line or line.startswith('#'): + if not line or line.startswith("#"): continue dependencies.append(line) - return env.Environment( - name=self.name, - dependencies=dependencies - ) + return env.Environment(name=self.name, dependencies=dependencies) diff --git a/docs/scrape_help.py b/docs/scrape_help.py index e59b8196440..88bd24243d9 100755 --- a/docs/scrape_help.py +++ b/docs/scrape_help.py @@ -1,24 +1,20 @@ #!/usr/bin/env python # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from subprocess import check_output, PIPE, Popen, STDOUT -from os.path import join, dirname, abspath, isdir -from os import makedirs, pathsep +import json +import re +import sys from collections import OrderedDict - from concurrent.futures import ThreadPoolExecutor - +from os import makedirs, pathsep +from os.path import abspath, dirname, isdir, join from shlex import quote +from subprocess import PIPE, STDOUT, Popen, check_output -import sys -import json -import re - -manpath = join(dirname(__file__), 'build', 'man') +manpath = join(dirname(__file__), "build", "man") if not isdir(manpath): makedirs(manpath) -rstpath = join(dirname(__file__), 'source', 'commands') +rstpath = join(dirname(__file__), "source", "commands") if not isdir(rstpath): makedirs(rstpath) @@ -32,8 +28,9 @@ """ + def run_command(*args, **kwargs): - include_stderr = kwargs.pop('include_stderr', False) + include_stderr = kwargs.pop("include_stderr", False) if include_stderr: stderr_pipe = STDOUT else: @@ -41,27 +38,34 @@ def run_command(*args, **kwargs): p = Popen(*args, stdout=PIPE, stderr=stderr_pipe, **kwargs) out, err = p.communicate() if err is None: - err = b'' - out, err = out.decode('utf-8'), err.decode('utf-8') + err = b"" + out, err = out.decode("utf-8"), err.decode("utf-8") if p.returncode != 0: - print("%r failed with error code %s" % - (' '.join(map(quote, args[0])), p.returncode), file=sys.stderr) + print( + "%r failed with error code %s" + % (" ".join(map(quote, args[0])), p.returncode), + file=sys.stderr, + ) elif err: print("{!r} gave stderr output: {}".format(" ".join(*args), err)) return out + def str_check_output(*args, **kwargs): - return check_output(*args, **kwargs).decode('utf-8') + return check_output(*args, **kwargs).decode("utf-8") + def conda_help(cache=[]): if cache: return cache[0] - cache.append(str_check_output(['conda', '--help'])) + cache.append(str_check_output(["conda", "--help"])) return cache[0] + def conda_command_help(command): - return str_check_output(['conda'] + command.split() + ['--help']) + return str_check_output(["conda"] + command.split() + ["--help"]) + def conda_commands(): print("Getting list of core commands") @@ -70,17 +74,18 @@ def conda_commands(): start = False for line in help.splitlines(): # Commands start after "command" header - if line.strip() == 'command': + if line.strip() == "command": start = True continue if start: # The end of the commands if not line: break - if line[4] != ' ': + if line[4] != " ": commands.append(line.split()[0]) return commands + def external_commands(): print("Getting list of external commands") help = conda_help() @@ -88,19 +93,19 @@ def external_commands(): start = False for line in help.splitlines(): # Commands start after "command" header - if line.strip() == 'other commands:': + if line.strip() == "other commands:": start = True continue if start: # The 
end of the commands if not line: break - if line[4] != ' ': + if line[4] != " ": commands.append(line.split()[0]) # TODO: Parallelize this print("Getting list of external subcommands") - subcommands_re = re.compile(r'\s*\{(.*)\}\s*') + subcommands_re = re.compile(r"\s*\{(.*)\}\s*") # Check for subcommands (like conda skeleton pypi) command_help = {} @@ -126,40 +131,51 @@ def get_help(command): break return commands + def man_replacements(): # XXX: We should use conda-api for this, but it's currently annoying to set the # root prefix with. - info = json.loads(str_check_output(['conda', 'info', '--json'])) + info = json.loads(str_check_output(["conda", "info", "--json"])) # We need to use an ordered dict because the root prefix should be # replaced last, since it is typically a substring of the default prefix - r = OrderedDict([ - (info['default_prefix'], 'default prefix'), - (pathsep.join(info['envs_dirs']), 'envs dirs'), - # For whatever reason help2man won't italicize these on its own - # Note these require conda > 3.7.1 - (info['user_rc_path'], r'\fI\,user .condarc path\/\fP'), - (info['sys_rc_path'], r'\fI\,system .condarc path\/\fP'), - - (info['root_prefix'], r'root prefix'), - ]) + r = OrderedDict( + [ + (info["default_prefix"], "default prefix"), + (pathsep.join(info["envs_dirs"]), "envs dirs"), + # For whatever reason help2man won't italicize these on its own + # Note these require conda > 3.7.1 + (info["user_rc_path"], r"\fI\,user .condarc path\/\fP"), + (info["sys_rc_path"], r"\fI\,system .condarc path\/\fP"), + (info["root_prefix"], r"root prefix"), + ] + ) return r + def generate_man(command): - conda_version = run_command(['conda', '--version'], include_stderr=True) + conda_version = run_command(["conda", "--version"], include_stderr=True) - manpage = '' + manpage = "" retries = 5 while not manpage and retries: - manpage = run_command([ - 'help2man', - '--name', 'conda', command, - '--section', '1', - '--source', 'Anaconda, Inc.', - '--version-string', conda_version, - '--no-info', - 'conda', 'command', - ]) + manpage = run_command( + [ + "help2man", + "--name", + "conda", + command, + "--section", + "1", + "--source", + "Anaconda, Inc.", + "--version-string", + conda_version, + "--no-info", + "conda", + "command", + ] + ) retries -= 1 if not manpage: @@ -168,26 +184,34 @@ def generate_man(command): replacements = man_replacements() for text in replacements: manpage = manpage.replace(text, replacements[text]) - with open(join(manpath, 'conda-%s.1' % command.replace(' ', '-')), 'w') as f: + with open(join(manpath, "conda-%s.1" % command.replace(" ", "-")), "w") as f: f.write(manpage) print("Generated manpage for conda %s" % command) + def generate_html(command): - command_file = command.replace(' ', '-') + command_file = command.replace(" ", "-") # Use abspath so that it always has a path separator - man = Popen(['man', abspath(join(manpath, 'conda-%s.1' % command_file))], stdout=PIPE) - htmlpage = check_output([ - 'man2html', - '-bare', # Don't use HTML, HEAD, or BODY tags - 'title', 'conda-%s' % command_file, - '-topm', '0', # No top margin - '-botm', '0', # No bottom margin - ], - stdin=man.stdout) - - with open(join(manpath, 'conda-%s.html' % command_file), 'wb') as f: + man = Popen( + ["man", abspath(join(manpath, "conda-%s.1" % command_file))], stdout=PIPE + ) + htmlpage = check_output( + [ + "man2html", + "-bare", # Don't use HTML, HEAD, or BODY tags + "title", + "conda-%s" % command_file, + "-topm", + "0", # No top margin + "-botm", + "0", # No bottom margin + ], + 
stdin=man.stdout, + ) + + with open(join(manpath, "conda-%s.html" % command_file), "wb") as f: f.write(htmlpage) print("Generated html for conda %s" % command) @@ -202,47 +226,48 @@ def write_rst(command, sep=None): rp = join(rp, sep) if not isdir(rp): makedirs(rp) - with open(join(rp, 'conda-%s.rst' % command_file), 'w') as f: + with open(join(rp, "conda-%s.rst" % command_file), "w") as f: f.write(RST_HEADER.format(command=command)) for line in html.splitlines(): - f.write(' ') + f.write(" ") f.write(line) - f.write('\n') + f.write("\n") print("Generated rst for conda %s" % command) + def main(): core_commands = conda_commands() # let's just hard-code this for now # build_commands = () build_commands = [ - 'build', - 'convert', - 'develop', - 'index', - 'inspect', - 'inspect channels', - 'inspect linkages', - 'inspect objects', - 'metapackage', + "build", + "convert", + "develop", + "index", + "inspect", + "inspect channels", + "inspect linkages", + "inspect objects", + "metapackage", # 'pipbuild', - 'render', + "render", # let's drop this one; I've dropped support for it in 4.3.x # coming back with TUF in the near future # 'sign', - 'skeleton', - 'skeleton cpan', - 'skeleton cran', - 'skeleton luarocks', - 'skeleton pypi', - 'env', - 'env attach', - 'env create', - 'env export', - 'env list', - 'env remove', - 'env update', - 'env upload', + "skeleton", + "skeleton cpan", + "skeleton cran", + "skeleton luarocks", + "skeleton pypi", + "env", + "env attach", + "env create", + "env export", + "env list", + "env remove", + "env update", + "env upload", ] commands = sys.argv[1:] or core_commands + build_commands @@ -258,11 +283,11 @@ def gen_command(command): for command in [c for c in core_commands if c in commands]: write_rst(command) for command in [c for c in build_commands if c in commands]: - if 'env' in command: - write_rst(command, sep='env') + if "env" in command: + write_rst(command, sep="env") else: - write_rst(command, sep='build') + write_rst(command, sep="build") -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/docs/source/_ext/conda_umls.py b/docs/source/_ext/conda_umls.py index 9eed5f21efb..15b79929960 100644 --- a/docs/source/_ext/conda_umls.py +++ b/docs/source/_ext/conda_umls.py @@ -1,12 +1,12 @@ #!/usr/bin/env python # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import fileinput import os -import requests import shutil import sys + +import requests from pylint.pyreverse.main import Run here = os.path.dirname(__file__) diff --git a/docs/source/conf.py b/docs/source/conf.py index f5477ee61fb..3b4f0d86add 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,7 +1,6 @@ #!/usr/bin/env python3 # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - # # conda documentation build configuration file, created by # sphinx-quickstart on Wed Aug 16 00:33:55 2017. @@ -238,6 +237,6 @@ intersphinx_mapping = { - 'python': ('https://docs.python.org/3', None), - 'pluggy': ('https://pluggy.readthedocs.io/en/stable/', None), + "python": ("https://docs.python.org/3", None), + "pluggy": ("https://pluggy.readthedocs.io/en/stable/", None), } diff --git a/news/12554-auto-format b/news/12554-auto-format new file mode 100644 index 00000000000..707056ddb0b --- /dev/null +++ b/news/12554-auto-format @@ -0,0 +1,20 @@ +### Enhancements + +* + +### Bug fixes + +* + +### Deprecations + +* + +### Docs + +* + +### Other + +* Format with black and replaced pre-commit's darker hook with black. 
(#12554) +* Format with isort and add pre-commit isort hook. (#12554) diff --git a/pyproject.toml b/pyproject.toml index 7529b16450b..a3dd56b11a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,12 +1,8 @@ [tool.black] -line-length = 99 -target-version = ['py37', 'py38', 'py39', 'py310'] -extend-exclude = ''' -^/( - conda/_vendor - | devenv -)/ -''' +target-version = ['py38', 'py39', 'py310'] + +[tool.isort] +profile = "black" [tool.vendoring] destination = "conda/_vendor/" diff --git a/setup.py b/setup.py index b2d7a975aa3..17221c51f93 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import os import sys @@ -41,9 +40,10 @@ "tqdm >=4", ] + def package_files(*root_directories): return [ - os.path.join('..', path, filename) + os.path.join("..", path, filename) for directory in root_directories for (path, directories, filenames) in os.walk(directory) for filename in filenames @@ -72,15 +72,15 @@ def package_files(*root_directories): exclude=("tests", "tests.*", "build", "utils", ".tox") ), package_data={ - '': package_files('conda/shell') + ['LICENSE'], + "": package_files("conda/shell") + ["LICENSE"], }, cmdclass={ "build_py": conda.auxlib.packaging.BuildPyCommand, "sdist": conda.auxlib.packaging.SDistCommand, }, entry_points={ - 'console_scripts': [ - 'conda=conda.cli.main_pip:main', + "console_scripts": [ + "conda=conda.cli.main_pip:main", ], }, install_requires=install_requires, diff --git a/tests/__init__.py b/tests/__init__.py index 8278ce0cadc..472aa33ce40 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,9 +1,9 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - # This is just here so that tests is a package, so that dotted relative # imports work. 
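# [editor's note] The package_files() helper reformatted in setup.py above
# gathers every file beneath the given roots into paths that setuptools'
# package_data accepts (relative to the package, hence the leading ".."). A
# runnable sketch of that walk-and-join idiom, mirroring the diff's own code:
import os


def package_files(*root_directories):
    return [
        os.path.join("..", path, filename)
        for directory in root_directories
        for (path, directories, filenames) in os.walk(directory)
        for filename in filenames
    ]


# e.g. package_files("conda/shell") -> ['../conda/shell/condabin/conda.bat', ...]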
from conda.gateways.logging import initialize_logging + initialize_logging() from conda.testing import ( diff --git a/tests/base/test_constants.py b/tests/base/test_constants.py index 0bd04acf4e4..6ac6594a889 100644 --- a/tests/base/test_constants.py +++ b/tests/base/test_constants.py @@ -1,10 +1,9 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +from logging import getLogger from conda.base.constants import ChannelPriority from conda.common.constants import NULL -from logging import getLogger log = getLogger(__name__) @@ -17,4 +16,4 @@ def test_ChannelPriority(): assert ChannelPriority("strict") == ChannelPriority.STRICT assert ChannelPriority["STRICT"] == ChannelPriority.STRICT assert ChannelPriority(False) == ChannelPriority.DISABLED - assert ChannelPriority('false') == ChannelPriority.DISABLED + assert ChannelPriority("false") == ChannelPriority.DISABLED diff --git a/tests/base/test_context.py b/tests/base/test_context.py index 79cd2167e0a..34054f43d95 100644 --- a/tests/base/test_context.py +++ b/tests/base/test_context.py @@ -1,10 +1,8 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - -from itertools import chain import os -from os.path import join, abspath +from itertools import chain +from os.path import abspath, join from pathlib import Path from tempfile import gettempdir from unittest import TestCase, mock @@ -13,29 +11,28 @@ from conda.auxlib.collection import AttrDict from conda.auxlib.ish import dals -from conda.base.constants import PathConflict, ChannelPriority +from conda.base.constants import ChannelPriority, PathConflict from conda.base.context import ( + conda_tests_ctxt_mgmt_def_pol, context, reset_context, - conda_tests_ctxt_mgmt_def_pol, validate_prefix_name, ) from conda.common.configuration import ValidationError, YamlRawParameter from conda.common.io import env_var, env_vars from conda.common.path import expand, win_path_backout -from conda.common.url import join_url, path_to_url from conda.common.serialize import yaml_round_trip_load +from conda.common.url import join_url, path_to_url from conda.core.package_cache_data import PackageCacheData -from conda.exceptions import EnvironmentNameNotFound, CondaValueError -from conda.gateways.disk.create import mkdir_p, create_package_cache_directory +from conda.exceptions import CondaValueError, EnvironmentNameNotFound +from conda.gateways.disk.create import create_package_cache_directory, mkdir_p from conda.gateways.disk.delete import rm_rf from conda.gateways.disk.permissions import make_read_only from conda.gateways.disk.update import touch from conda.models.channel import Channel from conda.models.match_spec import MatchSpec -from conda.utils import on_win - from conda.testing.helpers import tempdir +from conda.utils import on_win TEST_CONDARC = """ custom_channels: @@ -75,7 +72,6 @@ class ContextCustomRcTests(TestCase): - def setUp(self): string = TEST_CONDARC reset_context(()) @@ -90,27 +86,40 @@ def tearDown(self): reset_context() def test_migrated_custom_channels(self): - assert Channel('https://some.url.somewhere/stuff/darwin/noarch/a-mighty-fine.tar.bz2').canonical_name == 'darwin' - assert Channel('s3://just/cant/darwin/noarch/a-mighty-fine.tar.bz2').canonical_name == 'darwin' - assert Channel('s3://just/cant/darwin/noarch/a-mighty-fine.tar.bz2').urls() == [ - 'https://some.url.somewhere/stuff/darwin/noarch'] + assert ( + Channel( + "https://some.url.somewhere/stuff/darwin/noarch/a-mighty-fine.tar.bz2" + ).canonical_name + == "darwin" + ) + assert 
( + Channel("s3://just/cant/darwin/noarch/a-mighty-fine.tar.bz2").canonical_name + == "darwin" + ) + assert Channel("s3://just/cant/darwin/noarch/a-mighty-fine.tar.bz2").urls() == [ + "https://some.url.somewhere/stuff/darwin/noarch" + ] def test_old_channel_alias(self): platform = context.subdir - cf_urls = ["ftp://new.url:8082/conda-forge/%s" % platform, - "ftp://new.url:8082/conda-forge/noarch"] - assert Channel('conda-forge').urls() == cf_urls + cf_urls = [ + "ftp://new.url:8082/conda-forge/%s" % platform, + "ftp://new.url:8082/conda-forge/noarch", + ] + assert Channel("conda-forge").urls() == cf_urls url = "https://conda.anaconda.org/conda-forge/osx-64/some-great-package.tar.bz2" - assert Channel(url).canonical_name == 'conda-forge' - assert Channel(url).base_url == 'ftp://new.url:8082/conda-forge' + assert Channel(url).canonical_name == "conda-forge" + assert Channel(url).base_url == "ftp://new.url:8082/conda-forge" assert Channel(url).urls() == [ - 'ftp://new.url:8082/conda-forge/osx-64', - 'ftp://new.url:8082/conda-forge/noarch' + "ftp://new.url:8082/conda-forge/osx-64", + "ftp://new.url:8082/conda-forge/noarch", ] - assert Channel("https://conda.anaconda.org/conda-forge/label/dev/linux-64/" - "some-great-package.tar.bz2").urls() == [ + assert Channel( + "https://conda.anaconda.org/conda-forge/label/dev/linux-64/" + "some-great-package.tar.bz2" + ).urls() == [ "ftp://new.url:8082/conda-forge/label/dev/linux-64", "ftp://new.url:8082/conda-forge/label/dev/noarch", ] @@ -128,9 +137,11 @@ def test_signing_metadata_url_base(self): assert context.signing_metadata_url_base == SIGNING_URL_BASE def test_signing_metadata_url_base_empty_default_channels(self): - string = dals(""" + string = dals( + """ default_channels: [] - """) + """ + ) reset_context() rd = { "testdata": YamlRawParameter.make_raw_parameters( @@ -141,11 +152,12 @@ def test_signing_metadata_url_base_empty_default_channels(self): assert len(context.default_channels) == 0 assert context.signing_metadata_url_base is None - def test_client_ssl_cert(self): - string = dals(""" + string = dals( + """ client_ssl_cert_key: /some/key/path - """) + """ + ) reset_context() rd = { "testdata": YamlRawParameter.make_raw_parameters( @@ -156,35 +168,39 @@ def test_client_ssl_cert(self): pytest.raises(ValidationError, context.validate_configuration) def test_conda_envs_path(self): - saved_envs_path = os.environ.get('CONDA_ENVS_PATH') + saved_envs_path = os.environ.get("CONDA_ENVS_PATH") beginning = "C:" + os.sep if on_win else os.sep - path1 = beginning + os.sep.join(['my', 'envs', 'dir', '1']) - path2 = beginning + os.sep.join(['my', 'envs', 'dir', '2']) + path1 = beginning + os.sep.join(["my", "envs", "dir", "1"]) + path2 = beginning + os.sep.join(["my", "envs", "dir", "2"]) try: - os.environ['CONDA_ENVS_PATH'] = path1 + os.environ["CONDA_ENVS_PATH"] = path1 reset_context() assert context.envs_dirs[0] == path1 - os.environ['CONDA_ENVS_PATH'] = os.pathsep.join([path1, path2]) + os.environ["CONDA_ENVS_PATH"] = os.pathsep.join([path1, path2]) reset_context() assert context.envs_dirs[0] == path1 assert context.envs_dirs[1] == path2 finally: if saved_envs_path: - os.environ['CONDA_ENVS_PATH'] = saved_envs_path + os.environ["CONDA_ENVS_PATH"] = saved_envs_path else: - del os.environ['CONDA_ENVS_PATH'] + del os.environ["CONDA_ENVS_PATH"] def test_conda_bld_path(self): - conda_bld_path = join(gettempdir(), 'conda-bld') + conda_bld_path = join(gettempdir(), "conda-bld") conda_bld_url = path_to_url(conda_bld_path) try: mkdir_p(conda_bld_path) - with 
env_var('CONDA_BLD_PATH', conda_bld_path, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_BLD_PATH", + conda_bld_path, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): assert len(context.conda_build_local_paths) >= 1 assert context.conda_build_local_paths[0] == conda_bld_path - channel = Channel('local') + channel = Channel("local") assert channel.channel_name == "local" assert channel.channel_location is None assert channel.platform is None @@ -214,58 +230,82 @@ def test_conda_bld_path(self): assert channel.scheme == "file" assert channel.urls() == [ join_url(conda_bld_url, context.subdir), - join_url(conda_bld_url, 'noarch'), + join_url(conda_bld_url, "noarch"), ] assert channel.url() == join_url(conda_bld_url, context.subdir) - assert channel.channel_name.lower() == win_path_backout(conda_bld_path).lstrip('/').lower() - assert channel.channel_location == '' # location really is an empty string; all path information is in channel_name + assert ( + channel.channel_name.lower() + == win_path_backout(conda_bld_path).lstrip("/").lower() + ) + assert ( + channel.channel_location == "" + ) # location really is an empty string; all path information is in channel_name assert channel.canonical_name == "local" finally: rm_rf(conda_bld_path) def test_custom_multichannels(self): - assert context.custom_multichannels['michele'] == ( - Channel('passion'), - Channel('learn_from_every_thing'), + assert context.custom_multichannels["michele"] == ( + Channel("passion"), + Channel("learn_from_every_thing"), ) def test_restore_free_channel(self): - assert 'https://repo.anaconda.com/pkgs/free' not in context.default_channels - with env_var("CONDA_RESTORE_FREE_CHANNEL", 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): - assert context.default_channels.index('https://repo.anaconda.com/pkgs/free') == 1 + assert "https://repo.anaconda.com/pkgs/free" not in context.default_channels + with env_var( + "CONDA_RESTORE_FREE_CHANNEL", + "true", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + assert ( + context.default_channels.index("https://repo.anaconda.com/pkgs/free") + == 1 + ) def test_proxy_servers(self): - assert context.proxy_servers['http'] == 'http://user:pass@corp.com:8080' - assert context.proxy_servers['https'] is None - assert context.proxy_servers['ftp'] is None - assert context.proxy_servers['sftp'] == '' - assert context.proxy_servers['ftps'] == 'False' - assert context.proxy_servers['rsync'] == 'false' + assert context.proxy_servers["http"] == "http://user:pass@corp.com:8080" + assert context.proxy_servers["https"] is None + assert context.proxy_servers["ftp"] is None + assert context.proxy_servers["sftp"] == "" + assert context.proxy_servers["ftps"] == "False" + assert context.proxy_servers["rsync"] == "false" def test_conda_build_root_dir(self): - assert context.conda_build['root-dir'] == "/some/test/path" + assert context.conda_build["root-dir"] == "/some/test/path" def test_clobber_enum(self): - with env_var("CONDA_PATH_CONFLICT", 'prevent', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_PATH_CONFLICT", + "prevent", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): assert context.path_conflict == PathConflict.prevent def test_context_parameter_map(self): all_parameter_names = context.list_parameters() - all_mapped_parameter_names = tuple(chain.from_iterable(context.category_map.values())) + all_mapped_parameter_names = tuple( + chain.from_iterable(context.category_map.values()) + ) - unmapped_parameter_names = 
set(all_parameter_names) - set(all_mapped_parameter_names) + unmapped_parameter_names = set(all_parameter_names) - set( + all_mapped_parameter_names + ) assert not unmapped_parameter_names, unmapped_parameter_names assert len(all_parameter_names) == len(all_mapped_parameter_names) def test_context_parameters_have_descriptions(self): - skip_categories = ('CLI-only', 'Hidden and Undocumented') - documented_parameter_names = chain.from_iterable(( - parameter_names for category, parameter_names in context.category_map.items() - if category not in skip_categories - )) + skip_categories = ("CLI-only", "Hidden and Undocumented") + documented_parameter_names = chain.from_iterable( + ( + parameter_names + for category, parameter_names in context.category_map.items() + if category not in skip_categories + ) + ) from pprint import pprint + for name in documented_parameter_names: context.get_descriptions()[name] pprint(context.describe_parameter(name)) @@ -273,12 +313,16 @@ def test_context_parameters_have_descriptions(self): def test_local_build_root_custom_rc(self): assert context.local_build_root == abspath("/some/test/path") - test_path_1 = join(os.getcwd(), 'test_path_1') - with env_var("CONDA_CROOT", test_path_1, stack_callback=conda_tests_ctxt_mgmt_def_pol): + test_path_1 = join(os.getcwd(), "test_path_1") + with env_var( + "CONDA_CROOT", test_path_1, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert context.local_build_root == test_path_1 - test_path_2 = join(os.getcwd(), 'test_path_2') - with env_var("CONDA_BLD_PATH", test_path_2, stack_callback=conda_tests_ctxt_mgmt_def_pol): + test_path_2 = join(os.getcwd(), "test_path_2") + with env_var( + "CONDA_BLD_PATH", test_path_2, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert context.local_build_root == test_path_2 def test_default_target_is_root_prefix(self): @@ -286,35 +330,44 @@ def test_default_target_is_root_prefix(self): def test_target_prefix(self): with tempdir() as prefix: - mkdir_p(join(prefix, 'first', 'envs')) - mkdir_p(join(prefix, 'second', 'envs')) - create_package_cache_directory(join(prefix, 'first', 'pkgs')) - create_package_cache_directory(join(prefix, 'second', 'pkgs')) - envs_dirs = (join(prefix, 'first', 'envs'), join(prefix, 'second', 'envs')) - with env_var('CONDA_ENVS_DIRS', os.pathsep.join(envs_dirs), stack_callback=conda_tests_ctxt_mgmt_def_pol): - + mkdir_p(join(prefix, "first", "envs")) + mkdir_p(join(prefix, "second", "envs")) + create_package_cache_directory(join(prefix, "first", "pkgs")) + create_package_cache_directory(join(prefix, "second", "pkgs")) + envs_dirs = (join(prefix, "first", "envs"), join(prefix, "second", "envs")) + with env_var( + "CONDA_ENVS_DIRS", + os.pathsep.join(envs_dirs), + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): # with both dirs writable, choose first - reset_context((), argparse_args=AttrDict(name='blarg', func='create')) - assert context.target_prefix == join(envs_dirs[0], 'blarg') + reset_context((), argparse_args=AttrDict(name="blarg", func="create")) + assert context.target_prefix == join(envs_dirs[0], "blarg") # with first dir read-only, choose second PackageCacheData._cache_.clear() - make_read_only(join(envs_dirs[0], '.conda_envs_dir_test')) - reset_context((), argparse_args=AttrDict(name='blarg', func='create')) - assert context.target_prefix == join(envs_dirs[1], 'blarg') + make_read_only(join(envs_dirs[0], ".conda_envs_dir_test")) + reset_context((), argparse_args=AttrDict(name="blarg", func="create")) + assert context.target_prefix == join(envs_dirs[1], 
"blarg") # if first dir is read-only but environment exists, choose first PackageCacheData._cache_.clear() - mkdir_p(join(envs_dirs[0], 'blarg')) - touch(join(envs_dirs[0], 'blarg', 'history')) - reset_context((), argparse_args=AttrDict(name='blarg', func='create')) - assert context.target_prefix == join(envs_dirs[0], 'blarg') + mkdir_p(join(envs_dirs[0], "blarg")) + touch(join(envs_dirs[0], "blarg", "history")) + reset_context((), argparse_args=AttrDict(name="blarg", func="create")) + assert context.target_prefix == join(envs_dirs[0], "blarg") def test_aggressive_update_packages(self): assert context.aggressive_update_packages == () - specs = ['certifi', 'openssl>=1.1'] - with env_var('CONDA_AGGRESSIVE_UPDATE_PACKAGES', ','.join(specs), stack_callback=conda_tests_ctxt_mgmt_def_pol): - assert context.aggressive_update_packages == tuple(MatchSpec(s) for s in specs) + specs = ["certifi", "openssl>=1.1"] + with env_var( + "CONDA_AGGRESSIVE_UPDATE_PACKAGES", + ",".join(specs), + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + assert context.aggressive_update_packages == tuple( + MatchSpec(s) for s in specs + ) def test_channel_priority(self): assert context.channel_priority == ChannelPriority.DISABLED @@ -326,38 +379,42 @@ def test_threads(self): assert context.verify_threads == 1 assert context.execute_threads == 1 - with env_var('CONDA_DEFAULT_THREADS', '3', - stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_DEFAULT_THREADS", "3", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert context.default_threads == 3 assert context.verify_threads == 3 assert context.repodata_threads == 3 assert context.execute_threads == 3 - with env_var('CONDA_VERIFY_THREADS', '3', - stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_VERIFY_THREADS", "3", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert context.default_threads == default_value assert context.verify_threads == 3 assert context.repodata_threads == default_value assert context.execute_threads == 1 - with env_var('CONDA_REPODATA_THREADS', '3', - stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_REPODATA_THREADS", "3", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert context.default_threads == default_value assert context.verify_threads == 1 assert context.repodata_threads == 3 assert context.execute_threads == 1 - with env_var('CONDA_EXECUTE_THREADS', '3', - stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_EXECUTE_THREADS", "3", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert context.default_threads == default_value assert context.verify_threads == 1 assert context.repodata_threads == default_value assert context.execute_threads == 3 - with env_vars({'CONDA_EXECUTE_THREADS': '3', - 'CONDA_DEFAULT_THREADS': '1'}, - stack_callback=conda_tests_ctxt_mgmt_def_pol): - + with env_vars( + {"CONDA_EXECUTE_THREADS": "3", "CONDA_DEFAULT_THREADS": "1"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): assert context.default_threads == 1 assert context.verify_threads == 1 assert context.repodata_threads == 1 @@ -368,7 +425,7 @@ def test_channels_defaults(self): Test when no channels provided in cli """ reset_context(()) - assert context.channels == ('defaults',) + assert context.channels == ("defaults",) def test_channels_defaults_condarc(self): """ @@ -386,15 +443,15 @@ def test_channels_defaults_condarc(self): ) } context._set_raw_data(rd) - assert context.channels == ('defaults', 'conda-forge') + assert context.channels == ("defaults", 
"conda-forge") def test_specify_channels_cli_adding_defaults_no_condarc(self): """ When the channel haven't been specified in condarc, 'defaults' should be present when specifying channel in the cli """ - reset_context((), argparse_args=AttrDict(channel=['conda-forge'])) - assert context.channels == ('conda-forge', 'defaults') + reset_context((), argparse_args=AttrDict(channel=["conda-forge"])) + assert context.channels == ("conda-forge", "defaults") def test_specify_channels_cli_condarc(self): """ @@ -413,7 +470,7 @@ def test_specify_channels_cli_condarc(self): ) } context._set_raw_data(rd) - assert context.channels == ('defaults', 'conda-forge') + assert context.channels == ("defaults", "conda-forge") def test_specify_different_channels_cli_condarc(self): """ @@ -434,7 +491,7 @@ def test_specify_different_channels_cli_condarc(self): ) } context._set_raw_data(rd) - assert context.channels == ('conda-forge', 'other') + assert context.channels == ("conda-forge", "other") def test_specify_same_channels_cli_as_in_condarc(self): """ @@ -457,12 +514,13 @@ def test_specify_same_channels_cli_as_in_condarc(self): ) } context._set_raw_data(rd) - assert context.channels == ('conda-forge',) + assert context.channels == ("conda-forge",) def test_expandvars(self): """ Environment variables should be expanded in settings that have expandvars=True. """ + def _get_expandvars_context(attr, config_expr, env_value): with mock.patch.dict(os.environ, {"TEST_VAR": env_value}): reset_context(()) @@ -490,7 +548,9 @@ def _get_expandvars_context(attr, config_expr, env_value): "migrated_custom_channels", "proxy_servers", ): - value = _get_expandvars_context("proxy_servers", "{'x': '${TEST_VAR}'}", "foo") + value = _get_expandvars_context( + "proxy_servers", "{'x': '${TEST_VAR}'}", "foo" + ) assert value == {"x": "foo"} for attr in ( @@ -501,10 +561,14 @@ def _get_expandvars_context(attr, config_expr, env_value): value = _get_expandvars_context(attr, "['${TEST_VAR}']", "foo") assert value == ("foo",) - custom_channels = _get_expandvars_context("custom_channels", "{'x': '${TEST_VAR}'}", "http://foo") + custom_channels = _get_expandvars_context( + "custom_channels", "{'x': '${TEST_VAR}'}", "http://foo" + ) assert custom_channels["x"].location == "foo" - custom_multichannels = _get_expandvars_context("custom_multichannels", "{'x': ['${TEST_VAR}']}", "http://foo") + custom_multichannels = _get_expandvars_context( + "custom_multichannels", "{'x': ['${TEST_VAR}']}", "http://foo" + ) assert len(custom_multichannels["x"]) == 1 assert custom_multichannels["x"][0].location == "foo" @@ -520,24 +584,31 @@ def test_channel_settings(self): """ assert context.channel_settings == ( {"channel": "darwin", "param_one": "value_one", "param_two": "value_two"}, - {"channel": "http://localhost", "param_one": "value_one", "param_two": "value_two"}, + { + "channel": "http://localhost", + "param_one": "value_one", + "param_two": "value_two", + }, ) class ContextDefaultRcTests(TestCase): - def test_subdirs(self): - assert context.subdirs == (context.subdir, 'noarch') + assert context.subdirs == (context.subdir, "noarch") - subdirs = ('linux-highest', 'linux-64', 'noarch') - with env_var('CONDA_SUBDIRS', ','.join(subdirs), stack_callback=conda_tests_ctxt_mgmt_def_pol): + subdirs = ("linux-highest", "linux-64", "noarch") + with env_var( + "CONDA_SUBDIRS", + ",".join(subdirs), + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): assert context.subdirs == subdirs def test_local_build_root_default_rc(self): if context.root_writable: - assert 
context.local_build_root == join(context.root_prefix, 'conda-bld') + assert context.local_build_root == join(context.root_prefix, "conda-bld") else: - assert context.local_build_root == expand('~/conda-bld') + assert context.local_build_root == expand("~/conda-bld") if on_win: @@ -552,7 +623,10 @@ def test_local_build_root_default_rc(self): ( VALIDATE_PREFIX_ENV_NAME, False, - (VALIDATE_PREFIX_NAME_BASE_DIR, EnvironmentNameNotFound(VALIDATE_PREFIX_ENV_NAME)), + ( + VALIDATE_PREFIX_NAME_BASE_DIR, + EnvironmentNameNotFound(VALIDATE_PREFIX_ENV_NAME), + ), VALIDATE_PREFIX_NAME_BASE_DIR.joinpath(VALIDATE_PREFIX_ENV_NAME), ), # Passing in not allowed characters as the prefix name @@ -578,10 +652,9 @@ def test_local_build_root_default_rc(self): def test_validate_prefix_name(prefix, allow_base, mock_return_values, expected): ctx = mock.MagicMock() - with mock.patch("conda.base.context._first_writable_envs_dir") as mock_one, mock.patch( - "conda.base.context.locate_prefix_by_name" - ) as mock_two: - + with mock.patch( + "conda.base.context._first_writable_envs_dir" + ) as mock_one, mock.patch("conda.base.context.locate_prefix_by_name") as mock_two: mock_one.side_effect = [mock_return_values[0]] mock_two.side_effect = [mock_return_values[1]] diff --git a/tests/cli/test_cli_install.py b/tests/cli/test_cli_install.py index e2b17744b29..65e41d9262f 100644 --- a/tests/cli/test_cli_install.py +++ b/tests/cli/test_cli_install.py @@ -1,12 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from conda.testing.integration import run_command, Commands - import pytest -from conda.models.match_spec import MatchSpec from conda.exceptions import UnsatisfiableError from conda.gateways.disk.delete import rm_rf +from conda.models.match_spec import MatchSpec +from conda.testing.integration import Commands, run_command @pytest.fixture @@ -23,7 +22,9 @@ def prefix(tmpdir): def test_pre_link_message(mocker, prefix, pre_link_messages_package): prefix, _ = prefix mocker.patch("conda.cli.common.confirm_yn", return_value=True) - stdout, _, _ = run_command(Commands.INSTALL, prefix, pre_link_messages_package, "--use-local") + stdout, _, _ = run_command( + Commands.INSTALL, prefix, pre_link_messages_package, "--use-local" + ) assert "Lorem ipsum dolor sit amet" in stdout @@ -33,7 +34,10 @@ def test_find_conflicts_called_once(mocker, prefix): bad_deps = { "python": { ( - (MatchSpec("statistics"), MatchSpec("python[version='>=2.7,<2.8.0a0']")), + ( + MatchSpec("statistics"), + MatchSpec("python[version='>=2.7,<2.8.0a0']"), + ), "python=3", ) } diff --git a/tests/cli/test_common.py b/tests/cli/test_common.py index 617f31c9f1a..98068b26952 100644 --- a/tests/cli/test_common.py +++ b/tests/cli/test_common.py @@ -1,7 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - import os from io import StringIO @@ -15,10 +13,14 @@ from conda.common.io import captured, env_var from conda.exceptions import CondaSystemExit, DryRunExit, OperationNotAllowed + def test_check_non_admin_enabled_false(): - with env_var('CONDA_NON_ADMIN_ENABLED', 'false', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_NON_ADMIN_ENABLED", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): if on_win: from conda.common._os.windows import is_admin_on_windows + if is_admin_on_windows(): check_non_admin() else: @@ -33,19 +35,27 @@ def test_check_non_admin_enabled_false(): def test_check_non_admin_enabled_true(): - with env_var('CONDA_NON_ADMIN_ENABLED', 'true', 
stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_NON_ADMIN_ENABLED", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): check_non_admin() assert True def test_confirm_yn_yes(monkeypatch): - monkeypatch.setattr('sys.stdin', StringIO('blah\ny\n')) - with env_var('CONDA_ALWAYS_YES', 'false', stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_DRY_RUN', 'false', stack_callback=conda_tests_ctxt_mgmt_def_pol): + monkeypatch.setattr("sys.stdin", StringIO("blah\ny\n")) + with env_var( + "CONDA_ALWAYS_YES", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + with env_var( + "CONDA_DRY_RUN", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): assert not context.always_yes - args = AttrDict({ - 'dry_run': False, - }) + args = AttrDict( + { + "dry_run": False, + } + ) with captured() as cap: choice = confirm_yn(args) assert choice is True @@ -53,16 +63,18 @@ def test_confirm_yn_yes(monkeypatch): def test_confirm_yn_no(monkeypatch): - monkeypatch.setattr('sys.stdin', StringIO('n\n')) - args = AttrDict({ - 'dry_run': False, - }) + monkeypatch.setattr("sys.stdin", StringIO("n\n")) + args = AttrDict( + { + "dry_run": False, + } + ) with pytest.raises(CondaSystemExit): confirm_yn(args) def test_dry_run_exit(): - with env_var('CONDA_DRY_RUN', 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var("CONDA_DRY_RUN", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol): with pytest.raises(DryRunExit): confirm_yn() @@ -71,8 +83,12 @@ def test_dry_run_exit(): def test_always_yes(): - with env_var('CONDA_ALWAYS_YES', 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_DRY_RUN', 'false', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_ALWAYS_YES", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + with env_var( + "CONDA_DRY_RUN", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): choice = confirm_yn() assert choice is True diff --git a/tests/cli/test_conda_argparse.py b/tests/cli/test_conda_argparse.py index a266b6c9a68..9b51d99729a 100644 --- a/tests/cli/test_conda_argparse.py +++ b/tests/cli/test_conda_argparse.py @@ -1,11 +1,9 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - +import re from logging import getLogger import pytest -import re from conda.cli.conda_argparse import generate_parser from conda.cli.python_api import Commands, run_command diff --git a/tests/cli/test_config.py b/tests/cli/test_config.py index 706cb05274c..699e2496bde 100644 --- a/tests/cli/test_config.py +++ b/tests/cli/test_config.py @@ -1,21 +1,18 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import os -import pytest - from contextlib import contextmanager from textwrap import dedent -from conda.auxlib.compat import Utf8NamedTemporaryFile +import pytest +from conda.auxlib.compat import Utf8NamedTemporaryFile from conda.base.context import context, reset_context, sys_rc_path, user_rc_path from conda.cli.python_api import Commands, run_command from conda.common.configuration import ConfigurationLoadError -from conda.common.serialize import yaml_round_trip_load, yaml_round_trip_dump +from conda.common.serialize import yaml_round_trip_dump, yaml_round_trip_load from conda.gateways.disk.delete import rm_rf - # use condarc from source tree to run these tests against # # unset 'default_channels' so get_default_channels has predictable behavior @@ -31,11 +28,11 @@ @contextmanager def make_temp_condarc(value=None): try: - tempfile = 
Utf8NamedTemporaryFile(suffix='.yml', delete=False) + tempfile = Utf8NamedTemporaryFile(suffix=".yml", delete=False) tempfile.close() temp_path = tempfile.name if value: - with open(temp_path, 'w') as f: + with open(temp_path, "w") as f: f.write(value) reset_context([temp_path]) yield temp_path @@ -81,35 +78,49 @@ def _channels_as_yaml(*channels): def test_invalid_yaml(): - condarc = dedent("""\ + condarc = dedent( + """\ fgddgh channels: - test - """) + """ + ) try: with make_temp_condarc(condarc) as rc: - run_command(Commands.CONFIG, '--file', rc, '--add', 'channels', 'test') + run_command(Commands.CONFIG, "--file", rc, "--add", "channels", "test") except ConfigurationLoadError as err: assert "reason: invalid yaml at line" in err.message, err.message def test_channels_add_empty(): with make_temp_condarc() as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, '--add', - 'channels', 'test') - assert stdout == stderr == '' + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--file", rc, "--add", "channels", "test" + ) + assert stdout == stderr == "" assert _read_test_condarc(rc) == _channels_as_yaml("test", "defaults") def test_channels_add_empty_with_defaults(): # When defaults is explicitly given, it should not be added with make_temp_condarc() as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--add', 'channels', 'test', - '--add', 'channels', 'defaults', - use_exception_handler=True) - assert stdout == '' - assert stderr.strip() == "Warning: 'defaults' already in 'channels' list, moving to the top" + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--add", + "channels", + "test", + "--add", + "channels", + "defaults", + use_exception_handler=True, + ) + assert stdout == "" + assert ( + stderr.strip() + == "Warning: 'defaults' already in 'channels' list, moving to the top" + ) assert _read_test_condarc(rc) == _channels_as_yaml("defaults", "test") @@ -117,98 +128,145 @@ def test_channels_add_duplicate(): channels_initial = _channels_as_yaml("test", "defaults", "mychannel") channels_expected = _channels_as_yaml("mychannel", "test", "defaults") with make_temp_condarc(channels_initial) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--add', 'channels', 'mychannel', - use_exception_handler=True) - assert stdout == '' - assert stderr.strip() == "Warning: 'mychannel' already in 'channels' list, moving to the top" + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--add", + "channels", + "mychannel", + use_exception_handler=True, + ) + assert stdout == "" + assert ( + stderr.strip() + == "Warning: 'mychannel' already in 'channels' list, moving to the top" + ) assert _read_test_condarc(rc) == channels_expected def test_channels_prepend(): channels_expected = _channels_as_yaml("mychannel", "test", "defaults") with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--prepend', 'channels', 'mychannel') - assert stdout == stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--prepend", "channels", "mychannel" + ) + assert stdout == stderr == "" assert _read_test_condarc(rc) == channels_expected + "\n" + CONDARC_OTHER def test_channels_prepend_duplicate(): channels_expected = _channels_as_yaml("defaults", "test") with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--prepend', 'channels', 'defaults') - assert stdout == '' 
- assert stderr.strip() == "Warning: 'defaults' already in 'channels' list, moving to the top" + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--prepend", "channels", "defaults" + ) + assert stdout == "" + assert ( + stderr.strip() + == "Warning: 'defaults' already in 'channels' list, moving to the top" + ) assert _read_test_condarc(rc) == channels_expected + CONDARC_OTHER def test_channels_append(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--append', 'channels', 'mychannel', - use_exception_handler=True) - assert stdout == stderr == '' - assert _read_test_condarc(rc) == \ - CONDARC_CHANNELS + "\n - mychannel\n" + CONDARC_OTHER + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--append", + "channels", + "mychannel", + use_exception_handler=True, + ) + assert stdout == stderr == "" + assert ( + _read_test_condarc(rc) + == CONDARC_CHANNELS + "\n - mychannel\n" + CONDARC_OTHER + ) def test_channels_append_duplicate(): channels_expected = _channels_as_yaml("defaults", "test") with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--append', 'channels', 'test', - use_exception_handler=True) - assert stdout == '' - assert stderr.strip() == "Warning: 'test' already in 'channels' list, moving to the bottom" + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--append", + "channels", + "test", + use_exception_handler=True, + ) + assert stdout == "" + assert ( + stderr.strip() + == "Warning: 'test' already in 'channels' list, moving to the bottom" + ) assert _read_test_condarc(rc) == channels_expected + CONDARC_OTHER def test_channels_remove(): channels_expected = _channels_as_yaml("defaults") with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--remove', 'channels', 'test') - assert stdout == stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--remove", "channels", "test" + ) + assert stdout == stderr == "" assert _read_test_condarc(rc) == channels_expected + CONDARC_OTHER def test_channels_remove_duplicate(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--remove', 'channels', 'test') - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--remove', 'channels', 'test', - use_exception_handler=True) - assert stdout == '' - assert stderr.strip() == "CondaKeyError: 'channels': 'test' is not "\ - "in the 'channels' key of the config file" + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--remove", "channels", "test" + ) + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--remove", + "channels", + "test", + use_exception_handler=True, + ) + assert stdout == "" + assert ( + stderr.strip() == "CondaKeyError: 'channels': 'test' is not " + "in the 'channels' key of the config file" + ) def test_create_condarc_on_set(): with make_temp_condarc() as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, - '--set', 'always_yes', 'true') - assert stdout == stderr == '' + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--file", rc, "--set", "always_yes", "true" + ) + assert stdout == stderr == "" assert _read_test_condarc(rc) == "always_yes: true\n" def test_show_sorts_keys(): # test alphabetical yaml output with make_temp_condarc() as rc: - stdout, stderr, return_code = 
run_command(Commands.CONFIG, '--file', rc, '--show') + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--file", rc, "--show" + ) output_keys = yaml_round_trip_load(stdout).keys() - assert stderr == '' + assert stderr == "" assert sorted(output_keys) == [item for item in output_keys] def test_get_all(): condarc = CONDARC_BASE + "\n\ninvalid_key: true\n" with make_temp_condarc(condarc) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, '--get', use_exception_handler=True) - assert stdout == dedent("""\ + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", use_exception_handler=True + ) + assert stdout == dedent( + """\ --set always_yes True --set changeps1 False --set channel_alias http://alpha.conda.anaconda.org @@ -216,16 +274,19 @@ def test_get_all(): --add channels 'test' # highest priority --add create_default_packages 'numpy' --add create_default_packages 'ipython' - """) + """ + ) assert stderr.strip() == "unknown key invalid_key" def test_get_all_inc_maps(): - condarc = ("invalid_key: true\nchangeps1: false\n" + - CONDARC_CHANNELS + CONDARC_MAPS) + condarc = "invalid_key: true\nchangeps1: false\n" + CONDARC_CHANNELS + CONDARC_MAPS with make_temp_condarc(condarc) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, '--get', use_exception_handler=True) - assert stdout == dedent("""\ + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", use_exception_handler=True + ) + assert stdout == dedent( + """\ --set changeps1 False --add channels 'defaults' # lowest priority --add channels 'test' # highest priority @@ -233,53 +294,62 @@ def test_get_all_inc_maps(): --set conda_build.error_overlinking True --set proxy_servers.http 1.2.3.4:5678 --set proxy_servers.https 1.2.3.4:5678 - """) + """ + ) assert stderr.strip() == "unknown key invalid_key" def test_get_channels_list(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'channels') - assert stdout == dedent("""\ + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "channels" + ) + assert stdout == dedent( + """\ --add channels 'defaults' # lowest priority --add channels 'test' # highest priority - """) + """ + ) assert stderr == "" def test_get_boolean_value(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'changeps1') + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "changeps1" + ) assert stdout.strip() == "--set changeps1 False" assert stderr == "" def test_get_string_value(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'channel_alias') + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "channel_alias" + ) assert stdout.strip() == "--set channel_alias http://alpha.conda.anaconda.org" assert stderr == "" -@pytest.mark.parametrize("key,value", [ - ("proxy_servers.http", "1.2.3.4:5678"), - ("conda_build.cache_dir", "/tmp/conda-bld"), - ]) +@pytest.mark.parametrize( + "key,value", + [ + ("proxy_servers.http", "1.2.3.4:5678"), + ("conda_build.cache_dir", "/tmp/conda-bld"), + ], +) def test_get_map_subkey(key, value): with make_temp_condarc(CONDARC_MAPS) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', key) + stdout, stderr, _ = run_command(Commands.CONFIG, "--file", rc, "--get", key) assert stdout.strip() == f"--set 
{key} {value}" assert stderr == "" def test_get_map_full(): with make_temp_condarc(CONDARC_MAPS) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'proxy_servers') + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "proxy_servers" + ) assert "--set proxy_servers.http 1.2.3.4:5678\n" in stdout assert "--set proxy_servers.https 1.2.3.4:5678\n" in stdout assert stderr == "" @@ -287,43 +357,53 @@ def test_get_map_full(): def test_get_multiple_keys(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'changeps1', 'channels') - assert stdout == dedent("""\ + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "changeps1", "channels" + ) + assert stdout == dedent( + """\ --set changeps1 False --add channels 'defaults' # lowest priority --add channels 'test' # highest priority - """) + """ + ) assert stderr == "" def test_get_multiple_keys_incl_map_subkey(): with make_temp_condarc(CONDARC_BASE + CONDARC_MAPS) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'changeps1', 'proxy_servers.http') - assert stdout == dedent("""\ + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "changeps1", "proxy_servers.http" + ) + assert stdout == dedent( + """\ --set changeps1 False --set proxy_servers.http 1.2.3.4:5678 - """) + """ + ) assert stderr == "" def test_get_multiple_keys_incl_map_full(): with make_temp_condarc(CONDARC_BASE + CONDARC_MAPS) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'changeps1', 'proxy_servers') - assert stdout == dedent("""\ + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "changeps1", "proxy_servers" + ) + assert stdout == dedent( + """\ --set changeps1 False --set proxy_servers.http 1.2.3.4:5678 --set proxy_servers.https 1.2.3.4:5678 - """) + """ + ) assert stderr == "" def test_get_unconfigured_key(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'allow_softlinks') + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--get", "allow_softlinks" + ) assert stdout == "" assert stderr == "" @@ -331,9 +411,14 @@ def test_get_unconfigured_key(): def test_get_invalid_key(): condarc = CONDARC_BASE with make_temp_condarc(condarc) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', 'invalid_key', - use_exception_handler=True) + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--get", + "invalid_key", + use_exception_handler=True, + ) assert stdout == "" assert stderr.strip() == "unknown key invalid_key" @@ -341,38 +426,54 @@ def test_get_invalid_key(): def test_set_key(): key, from_val, to_val = "changeps1", "true", "false" with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _= run_command(Commands.CONFIG, '--file', rc, - '--set', key, to_val) - assert stdout == stderr == '' - assert _read_test_condarc(rc)== \ - CONDARC_BASE.replace(f"{key}: {from_val}", f"{key}: {to_val}") - - -@pytest.mark.parametrize("key,from_val,to_val", [ - ("proxy_servers.http", "1.2.3.4:5678", "4.3.2.1:9876"), - ("conda_build.cache_dir", "/tmp/conda-bld", "/var/tmp/build"), - # broken: write process for conda_build section converts bools to strings - pytest.param("conda_build.error_overlinking", "true", "false", - marks=pytest.mark.skip("known to be broken")), - ]) + stdout, stderr, _ = 
run_command( + Commands.CONFIG, "--file", rc, "--set", key, to_val + ) + assert stdout == stderr == "" + assert _read_test_condarc(rc) == CONDARC_BASE.replace( + f"{key}: {from_val}", f"{key}: {to_val}" + ) + + +@pytest.mark.parametrize( + "key,from_val,to_val", + [ + ("proxy_servers.http", "1.2.3.4:5678", "4.3.2.1:9876"), + ("conda_build.cache_dir", "/tmp/conda-bld", "/var/tmp/build"), + # broken: write process for conda_build section converts bools to strings + pytest.param( + "conda_build.error_overlinking", + "true", + "false", + marks=pytest.mark.skip("known to be broken"), + ), + ], +) def test_set_map_key(key, from_val, to_val): parent_key, sub_key = key.split(".") with make_temp_condarc(CONDARC_MAPS) as rc: - stdout, stderr, _= run_command(Commands.CONFIG, '--file', rc, - '--set', key, to_val) - assert stdout == stderr == '' - assert _read_test_condarc(rc) == \ - CONDARC_MAPS.replace(f" {sub_key}: {from_val}", - f" {sub_key}: {to_val}") + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--set", key, to_val + ) + assert stdout == stderr == "" + assert _read_test_condarc(rc) == CONDARC_MAPS.replace( + f" {sub_key}: {from_val}", f" {sub_key}: {to_val}" + ) def test_set_unconfigured_key(): key, to_val = "restore_free_channel", "true" with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _= run_command(Commands.CONFIG, '--file', rc, - '--set', key, to_val, - use_exception_handler=True) - assert stdout == stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--set", + key, + to_val, + use_exception_handler=True, + ) + assert stdout == stderr == "" assert _read_test_condarc(rc) == CONDARC_BASE + f"{key}: {to_val}\n" @@ -380,42 +481,61 @@ def test_set_invalid_key(): key, to_val = "invalid_key", "a_bogus_value" error = f"CondaValueError: Key '{key}' is not a known primitive parameter." with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _= run_command(Commands.CONFIG, '--file', rc, - '--set', key, to_val, - use_exception_handler=True) - assert stdout == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--set", + key, + to_val, + use_exception_handler=True, + ) + assert stdout == "" assert stderr.strip() == error - assert _read_test_condarc(rc)== CONDARC_BASE + assert _read_test_condarc(rc) == CONDARC_BASE def test_add_key(): with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--add', 'disallowed_packages', 'perl') - assert stdout == stderr == '' - assert _read_test_condarc(rc) == \ - CONDARC_BASE + "disallowed_packages:\n - perl\n" + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--add", "disallowed_packages", "perl" + ) + assert stdout == stderr == "" + assert ( + _read_test_condarc(rc) == CONDARC_BASE + "disallowed_packages:\n - perl\n" + ) def test_add_invalid_key(): key, to_val = "invalid_key", "a_bogus_value" error = f"CondaValueError: Key '{key}' is not a known sequence parameter." 
with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _= run_command(Commands.CONFIG, '--file', rc, - '--add', key, to_val, - use_exception_handler=True) - assert stdout == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--add", + key, + to_val, + use_exception_handler=True, + ) + assert stdout == "" assert stderr.strip() == error - assert _read_test_condarc(rc)== CONDARC_BASE + assert _read_test_condarc(rc) == CONDARC_BASE def test_remove_key(): key, value = "changeps1", "false" with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--remove-key', key, - use_exception_handler=True) - assert stdout == stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--remove-key", + key, + use_exception_handler=True, + ) + assert stdout == stderr == "" assert f"{key}: {value}\n" not in _read_test_condarc(rc) @@ -423,13 +543,23 @@ def test_remove_key_duplicate(): key, value = "changeps1", "false" error = f"CondaKeyError: '{key}': key '{key}' is not in the config file" with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--remove-key', key, - use_exception_handler=True) - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--remove-key', key, - use_exception_handler=True) - assert stdout == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--remove-key", + key, + use_exception_handler=True, + ) + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--remove-key", + key, + use_exception_handler=True, + ) + assert stdout == "" assert stderr.strip() == error assert f"{key}: {value}\n" not in _read_test_condarc(rc) @@ -438,28 +568,42 @@ def test_remove_unconfigured_key(): key = "restore_free_channel" error = f"CondaKeyError: '{key}': key '{key}' is not in the config file" with make_temp_condarc(CONDARC_BASE) as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--remove-key', key, - use_exception_handler=True) - assert stdout == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--remove-key", + key, + use_exception_handler=True, + ) + assert stdout == "" assert stderr.strip() == error assert _read_test_condarc(rc) == CONDARC_BASE -@pytest.mark.parametrize("key,str_value,py_value", [ - ("always_yes", "yes", True), - ("always_yes", "no", False), - ("always_yes", "true", True), - ("always_yes", "false", False), - ("channel_alias", "https://repo.example.com", "https://repo.example.com"), - ("proxy_servers.http", "1.2.3.4:5678", {'http': '1.2.3.4:5678'}), - ]) +@pytest.mark.parametrize( + "key,str_value,py_value", + [ + ("always_yes", "yes", True), + ("always_yes", "no", False), + ("always_yes", "true", True), + ("always_yes", "false", False), + ("channel_alias", "https://repo.example.com", "https://repo.example.com"), + ("proxy_servers.http", "1.2.3.4:5678", {"http": "1.2.3.4:5678"}), + ], +) def test_set_check_types(key, str_value, py_value): with make_temp_condarc() as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--set', key, str_value, - use_exception_handler=True) - assert stdout == stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, + "--file", + rc, + "--set", + key, + str_value, + use_exception_handler=True, + ) + assert stdout == stderr == "" with open(rc) as fh: content = yaml_round_trip_load(fh.read()) if "." 
in key: @@ -468,14 +612,14 @@ def test_set_check_types(key, str_value, py_value): def test_set_and_get_bool(): - key = 'restore_free_channel' + key = "restore_free_channel" with make_temp_condarc() as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--set', key, 'yes') - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--get', key) - assert stdout.strip() == f'--set {key} True' - assert stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--set", key, "yes" + ) + stdout, stderr, _ = run_command(Commands.CONFIG, "--file", rc, "--get", key) + assert stdout.strip() == f"--set {key} True" + assert stderr == "" def test_ssl_verify_default(): @@ -486,24 +630,25 @@ def test_ssl_verify_default(): def test_ssl_verify_set_bool(): with make_temp_condarc() as rc: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--set', 'ssl_verify', 'no') - assert stdout == stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--set", "ssl_verify", "no" + ) + assert stdout == stderr == "" reset_context([rc]) assert context.ssl_verify is False def test_ssl_verify_set_filename(): with make_temp_condarc() as rc, Utf8NamedTemporaryFile() as tf: - stdout, stderr, _ = run_command(Commands.CONFIG, '--file', rc, - '--set', 'ssl_verify', tf.name) - assert stdout == stderr == '' + stdout, stderr, _ = run_command( + Commands.CONFIG, "--file", rc, "--set", "ssl_verify", tf.name + ) + assert stdout == stderr == "" reset_context([rc]) assert context.ssl_verify == tf.name def test_set_rc_without_user_rc(): - if os.path.exists(sys_rc_path): # Backup system rc_config with open(sys_rc_path) as fh: @@ -524,77 +669,117 @@ def test_set_rc_without_user_rc(): try: # Write custom system sys_rc_config - with open(sys_rc_path, 'w') as rc: - rc.write(yaml_round_trip_dump({'channels':['conda-forge']})) + with open(sys_rc_path, "w") as rc: + rc.write(yaml_round_trip_dump({"channels": ["conda-forge"]})) except OSError: # In case, we don't have writing right to the system rc config file pytest.skip("No writing right to root prefix.") # This would create a user rc_config - stdout, stderr, return_code = run_command(Commands.CONFIG, - '--add', 'channels', 'test') - assert stdout == stderr == '' - assert yaml_round_trip_load(_read_test_condarc(user_rc_path)) == {'channels': ['test', 'conda-forge']} + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--add", "channels", "test" + ) + assert stdout == stderr == "" + assert yaml_round_trip_load(_read_test_condarc(user_rc_path)) == { + "channels": ["test", "conda-forge"] + } if restore_user_rc_config_backup: # Restore previous user rc_config - with open(user_rc_path, 'w') as rc: + with open(user_rc_path, "w") as rc: rc.write(yaml_round_trip_dump(user_rc_config_backup)) if restore_sys_rc_config_backup: # Restore previous system rc_config - with open(sys_rc_path, 'w') as rc: + with open(sys_rc_path, "w") as rc: rc.write(yaml_round_trip_dump(sys_rc_config_backup)) def test_custom_multichannels_append(): with make_temp_condarc() as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, '--append', - 'custom_multichannels.foo', 'bar') - assert stdout == stderr == '' - assert _read_test_condarc(rc) == yaml_round_trip_dump({"custom_multichannels": {"foo": ["bar"]}}) + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--file", rc, "--append", "custom_multichannels.foo", "bar" + ) + assert stdout == stderr == "" + assert _read_test_condarc(rc) == 
yaml_round_trip_dump( + {"custom_multichannels": {"foo": ["bar"]}} + ) def test_custom_multichannels_add(): with make_temp_condarc() as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, '--add', - 'custom_multichannels.foo', 'bar') - assert stdout == stderr == '' - assert _read_test_condarc(rc) == yaml_round_trip_dump({"custom_multichannels": {"foo": ["bar"]}}) + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--file", rc, "--add", "custom_multichannels.foo", "bar" + ) + assert stdout == stderr == "" + assert _read_test_condarc(rc) == yaml_round_trip_dump( + {"custom_multichannels": {"foo": ["bar"]}} + ) def test_custom_multichannels_prepend(): with make_temp_condarc() as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, '--prepend', - 'custom_multichannels.foo', 'bar') - assert stdout == stderr == '' - assert _read_test_condarc(rc) == yaml_round_trip_dump({"custom_multichannels": {"foo": ["bar"]}}) + stdout, stderr, return_code = run_command( + Commands.CONFIG, + "--file", + rc, + "--prepend", + "custom_multichannels.foo", + "bar", + ) + assert stdout == stderr == "" + assert _read_test_condarc(rc) == yaml_round_trip_dump( + {"custom_multichannels": {"foo": ["bar"]}} + ) def test_custom_multichannels_append_duplicate(): - custom_multichannels_expected = yaml_round_trip_dump({"custom_multichannels": {"foo": ["bar"]}}) + custom_multichannels_expected = yaml_round_trip_dump( + {"custom_multichannels": {"foo": ["bar"]}} + ) with make_temp_condarc(custom_multichannels_expected) as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, '--append', - 'custom_multichannels.foo', 'bar') - assert stdout == '' - assert stderr.strip() == "Warning: 'bar' already in 'custom_multichannels.foo' list, moving to the bottom" + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--file", rc, "--append", "custom_multichannels.foo", "bar" + ) + assert stdout == "" + assert ( + stderr.strip() + == "Warning: 'bar' already in 'custom_multichannels.foo' list, moving to the bottom" + ) assert _read_test_condarc(rc) == custom_multichannels_expected def test_custom_multichannels_add_duplicate(): - custom_multichannels_expected = yaml_round_trip_dump({"custom_multichannels": {"foo": ["bar"]}}) + custom_multichannels_expected = yaml_round_trip_dump( + {"custom_multichannels": {"foo": ["bar"]}} + ) with make_temp_condarc(custom_multichannels_expected) as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, '--add', - 'custom_multichannels.foo', 'bar') - assert stdout == '' - assert stderr.strip() == "Warning: 'bar' already in 'custom_multichannels.foo' list, moving to the top" + stdout, stderr, return_code = run_command( + Commands.CONFIG, "--file", rc, "--add", "custom_multichannels.foo", "bar" + ) + assert stdout == "" + assert ( + stderr.strip() + == "Warning: 'bar' already in 'custom_multichannels.foo' list, moving to the top" + ) assert _read_test_condarc(rc) == custom_multichannels_expected def test_custom_multichannels_prepend_duplicate(): - custom_multichannels_expected = yaml_round_trip_dump({"custom_multichannels": {"foo": ["bar"]}}) + custom_multichannels_expected = yaml_round_trip_dump( + {"custom_multichannels": {"foo": ["bar"]}} + ) with make_temp_condarc(custom_multichannels_expected) as rc: - stdout, stderr, return_code = run_command(Commands.CONFIG, '--file', rc, '--prepend', - 'custom_multichannels.foo', 'bar') - assert stdout == '' - assert stderr.strip() == "Warning: 
'bar' already in 'custom_multichannels.foo' list, moving to the top" + stdout, stderr, return_code = run_command( + Commands.CONFIG, + "--file", + rc, + "--prepend", + "custom_multichannels.foo", + "bar", + ) + assert stdout == "" + assert ( + stderr.strip() + == "Warning: 'bar' already in 'custom_multichannels.foo' list, moving to the top" + ) assert _read_test_condarc(rc) == custom_multichannels_expected diff --git a/tests/cli/test_main_clean.py b/tests/cli/test_main_clean.py index cacc1cac294..6a97f6a51b5 100644 --- a/tests/cli/test_main_clean.py +++ b/tests/cli/test_main_clean.py @@ -1,15 +1,25 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from datetime import datetime from json import loads as json_loads from os import walk -from os.path import basename, isdir, join, exists +from os.path import basename, exists, isdir, join from shutil import copy -from conda.base.constants import CONDA_PACKAGE_EXTENSIONS, CONDA_TEMP_EXTENSIONS, CONDA_LOGS_DIR + +from conda.base.constants import ( + CONDA_LOGS_DIR, + CONDA_PACKAGE_EXTENSIONS, + CONDA_TEMP_EXTENSIONS, +) from conda.core.subdir_data import create_cache_dir from conda.gateways.disk.create import mkdir_p -from conda.testing.integration import make_temp_package_cache, run_command, Commands, make_temp_env +from conda.testing.integration import ( + Commands, + make_temp_env, + make_temp_package_cache, + run_command, +) + def _get_pkgs(pkgs_dir): _, dirs, _ = next(walk(pkgs_dir)) @@ -18,7 +28,11 @@ def _get_pkgs(pkgs_dir): def _get_tars(pkgs_dir): _, _, files = next(walk(pkgs_dir)) - return [join(pkgs_dir, file) for file in files if file.endswith(CONDA_PACKAGE_EXTENSIONS)] + return [ + join(pkgs_dir, file) + for file in files + if file.endswith(CONDA_PACKAGE_EXTENSIONS) + ] def _get_index_cache(): @@ -29,7 +43,9 @@ def _get_index_cache(): def _get_tempfiles(pkgs_dir): _, _, files = next(walk(pkgs_dir)) - return [join(pkgs_dir, file) for file in files if file.endswith(CONDA_TEMP_EXTENSIONS)] + return [ + join(pkgs_dir, file) for file in files if file.endswith(CONDA_TEMP_EXTENSIONS) + ] def _get_logfiles(pkgs_dir): @@ -58,7 +74,9 @@ def test_clean_force_pkgs_dirs(clear_cache): assert isdir(pkgs_dir) with make_temp_env(pkg): - stdout, _, _ = run_command(Commands.CLEAN, "", "--force-pkgs-dirs", "--yes", "--json") + stdout, _, _ = run_command( + Commands.CLEAN, "", "--force-pkgs-dirs", "--yes", "--json" + ) json_loads(stdout) # assert valid json # pkgs_dir is removed @@ -81,14 +99,18 @@ def test_clean_and_packages(clear_cache): assert_any_pkg(pkg, _get_pkgs(pkgs_dir)) # --json flag is regression test for #5451 - stdout, _, _ = run_command(Commands.CLEAN, "", "--packages", "--yes", "--json") + stdout, _, _ = run_command( + Commands.CLEAN, "", "--packages", "--yes", "--json" + ) json_loads(stdout) # assert valid json # pkg still exists since its in use by temp env assert_any_pkg(pkg, _get_pkgs(pkgs_dir)) run_command(Commands.REMOVE, prefix, pkg, "--yes", "--json") - stdout, _, _ = run_command(Commands.CLEAN, "", "--packages", "--yes", "--json") + stdout, _, _ = run_command( + Commands.CLEAN, "", "--packages", "--yes", "--json" + ) json_loads(stdout) # assert valid json # pkg is removed @@ -111,7 +133,9 @@ def test_clean_tarballs(clear_cache): assert_any_pkg(pkg, _get_tars(pkgs_dir)) # --json flag is regression test for #5451 - stdout, _, _ = run_command(Commands.CLEAN, "", "--tarballs", "--yes", "--json") + stdout, _, _ = run_command( + Commands.CLEAN, "", "--tarballs", "--yes", "--json" + ) json_loads(stdout) # 
assert valid json # tarball is removed @@ -133,7 +157,9 @@ def test_clean_index_cache(clear_cache): # index cache exists assert _get_index_cache() - stdout, _, _ = run_command(Commands.CLEAN, "", "--index-cache", "--yes", "--json") + stdout, _, _ = run_command( + Commands.CLEAN, "", "--index-cache", "--yes", "--json" + ) json_loads(stdout) # assert valid json # index cache is cleared @@ -208,7 +234,9 @@ def test_clean_logfiles(clear_cache): assert path in _get_logfiles(pkgs_dir) # --json flag is regression test for #5451 - stdout, _, _ = run_command(Commands.CLEAN, "", "--logfiles", "--yes", "--json") + stdout, _, _ = run_command( + Commands.CLEAN, "", "--logfiles", "--yes", "--json" + ) json_loads(stdout) # assert valid json # logfiles removed @@ -248,7 +276,9 @@ def test_clean_all(clear_cache): assert not cache run_command(Commands.REMOVE, prefix, pkg, "--yes", "--json") - stdout, _, _ = run_command(Commands.CLEAN, "", "--packages", "--yes", "--json") + stdout, _, _ = run_command( + Commands.CLEAN, "", "--packages", "--yes", "--json" + ) json_loads(stdout) # assert valid json # pkg is removed diff --git a/tests/cli/test_main_info.py b/tests/cli/test_main_info.py index d967abb0905..7a4632a00d5 100644 --- a/tests/cli/test_main_info.py +++ b/tests/cli/test_main_info.py @@ -1,10 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - import json from logging import getLogger - from os.path import isdir from conda.cli.python_api import Commands, run_command diff --git a/tests/cli/test_main_notices.py b/tests/cli/test_main_notices.py index 9df735e87e2..91db56117f6 100644 --- a/tests/cli/test_main_notices.py +++ b/tests/cli/test_main_notices.py @@ -8,17 +8,17 @@ import pytest -from conda.base.context import context from conda.base.constants import NOTICES_DECORATOR_DISPLAY_INTERVAL -from conda.cli import main_notices as notices +from conda.base.context import context from conda.cli import conda_argparse +from conda.cli import main_notices as notices from conda.notices import fetch from conda.testing.helpers import run_inprocess_conda_command as run from conda.testing.notices.helpers import ( add_resp_to_mock, create_notice_cache_files, - get_test_notices, get_notice_cache_filenames, + get_test_notices, offset_cache_file_mtime, ) @@ -71,7 +71,10 @@ def test_main_notices( def test_main_notices_reads_from_cache( - capsys, conda_notices_args_n_parser, notices_cache_dir, notices_mock_http_session_get + capsys, + conda_notices_args_n_parser, + notices_cache_dir, + notices_mock_http_session_get, ): """ Test the full working path through the code when reading from cache instead of making @@ -99,7 +102,10 @@ def test_main_notices_reads_from_cache( def test_main_notices_reads_from_expired_cache( - capsys, conda_notices_args_n_parser, notices_cache_dir, notices_mock_http_session_get + capsys, + conda_notices_args_n_parser, + notices_cache_dir, + notices_mock_http_session_get, ): """ Test the full working path through the code when reading from cache instead of making @@ -112,7 +118,9 @@ def test_main_notices_reads_from_expired_cache( messages = ("Test One", "Test Two") messages_different = ("With different value one", "With different value two") - created_at = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=14) + created_at = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta( + days=14 + ) cache_files = get_notice_cache_filenames(context) # Cache first version of notices, with a cache date we know is expired @@ -125,7 +133,9 @@ def 
test_main_notices_reads_from_expired_cache( # different messages messages_different_json = get_test_notices(messages_different) add_resp_to_mock( - notices_mock_http_session_get, status_code=200, messages_json=messages_different_json + notices_mock_http_session_get, + status_code=200, + messages_json=messages_different_json, ) notices.execute(args, parser) @@ -140,7 +150,10 @@ def test_main_notices_reads_from_expired_cache( def test_main_notices_handles_bad_expired_at_field( - capsys, conda_notices_args_n_parser, notices_cache_dir, notices_mock_http_session_get + capsys, + conda_notices_args_n_parser, + notices_cache_dir, + notices_mock_http_session_get, ): """ This test ensures that an incorrectly defined `notices.json` file doesn't completely break @@ -205,10 +218,14 @@ def test_cache_names_appear_as_expected( """ This is a test to make sure the cache filenames appear as we expect them to. """ - with mock.patch("conda.notices.core.get_channel_name_and_urls") as get_channel_name_and_urls: + with mock.patch( + "conda.notices.core.get_channel_name_and_urls" + ) as get_channel_name_and_urls: channel_url = "http://localhost/notices.json" get_channel_name_and_urls.return_value = ((channel_url, "channel_name"),) - expected_cache_filename = f"{hashlib.sha256(channel_url.encode()).hexdigest()}.json" + expected_cache_filename = ( + f"{hashlib.sha256(channel_url.encode()).hexdigest()}.json" + ) args, parser = conda_notices_args_n_parser messages = ("Test One", "Test Two") @@ -233,7 +250,9 @@ def test_cache_names_appear_as_expected( assert os.path.basename(cache_files[0]) == expected_cache_filename -def test_notices_appear_once_when_running_decorated_commands(tmpdir, env_one, notices_cache_dir): +def test_notices_appear_once_when_running_decorated_commands( + tmpdir, env_one, notices_cache_dir +): """ As a user, I want to make sure when I run commands like "install" and "update" that the channels are only appearing according to the specified interval in: @@ -312,18 +331,24 @@ def test_notices_does_not_interrupt_command_on_failure( "conda.notices.core.logger.error" ) as mock_logger: mock_open.side_effect = [PermissionError(error_message)] - _, _, exit_code = run(f"conda create -n {env_name} -y -c local --override-channels") + _, _, exit_code = run( + f"conda create -n {env_name} -y -c local --override-channels" + ) assert exit_code is None - assert mock_logger.call_args == mock.call(f"Unable to open cache file: {error_message}") + assert mock_logger.call_args == mock.call( + f"Unable to open cache file: {error_message}" + ) _, _, exit_code = run(f"conda env remove -n {env_name}") assert exit_code is None -def test_notices_cannot_read_cache_files(notices_cache_dir, notices_mock_http_session_get): +def test_notices_cannot_read_cache_files( + notices_cache_dir, notices_mock_http_session_get +): """ As a user, when I run `conda notices` and the cache file cannot be read or written, I want to see an error message. 
diff --git a/tests/cli/test_main_rename.py b/tests/cli/test_main_rename.py index 6a9c6c8cbaf..2bfa3578359 100644 --- a/tests/cli/test_main_rename.py +++ b/tests/cli/test_main_rename.py @@ -3,16 +3,17 @@ from __future__ import annotations import json -from unittest import mock import os.path import pathlib import tempfile +from unittest import mock import pytest from conda.base.context import context, locate_prefix_by_name from conda.exceptions import CondaError, EnvironmentNameNotFound -from conda.testing.helpers import run_inprocess_conda_command as run, set_active_prefix +from conda.testing.helpers import run_inprocess_conda_command as run +from conda.testing.helpers import set_active_prefix TEST_ENV_NAME_1 = "env-1" TEST_ENV_NAME_2 = "env-2" @@ -70,14 +71,20 @@ def list_envs(): def test_rename_by_name_success(env_one): - run(f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_RENAME}", disallow_stderr=False) + run( + f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_RENAME}", + disallow_stderr=False, + ) assert locate_prefix_by_name(TEST_ENV_NAME_RENAME) with pytest.raises(EnvironmentNameNotFound): locate_prefix_by_name(TEST_ENV_NAME_1) # Clean up - run(f"conda rename -n {TEST_ENV_NAME_RENAME} {TEST_ENV_NAME_1}", disallow_stderr=False) + run( + f"conda rename -n {TEST_ENV_NAME_RENAME} {TEST_ENV_NAME_1}", + disallow_stderr=False, + ) def test_rename_by_path_success(env_one): @@ -104,7 +111,10 @@ def test_rename_by_name_name_already_exists_error(env_one): out, err, exit_code = run( f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_1}", disallow_stderr=False ) - assert f"The environment '{TEST_ENV_NAME_1}' already exists. Override with --force" in err + assert ( + f"The environment '{TEST_ENV_NAME_1}' already exists. Override with --force" + in err + ) def test_rename_by_path_path_already_exists_error(env_one): @@ -130,7 +140,8 @@ def test_cannot_rename_base_env_by_name(env_one): def test_cannot_rename_base_env_by_path(env_one): """Test to ensure that we cannot rename the base env invoked by path""" out, err, exit_code = run( - f"conda rename -p {context.root_prefix} {TEST_ENV_NAME_RENAME}", disallow_stderr=False + f"conda rename -p {context.root_prefix} {TEST_ENV_NAME_RENAME}", + disallow_stderr=False, ) assert "The 'base' environment cannot be renamed" in err @@ -150,7 +161,8 @@ def test_cannot_rename_active_env_by_name(env_one): with set_active_prefix(prefix): out, err, exit_code = run( - f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_RENAME}", disallow_stderr=False + f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_RENAME}", + disallow_stderr=False, ) assert "Cannot rename the active environment" in err @@ -161,7 +173,10 @@ def test_rename_with_force(env_one, env_two): Without this flag, it would return with an error message. 
""" # Do a force rename - run(f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_2} --force", disallow_stderr=False) + run( + f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_2} --force", + disallow_stderr=False, + ) (_, _, exit_code), _ = list_envs() @@ -185,7 +200,8 @@ def test_rename_with_force_with_errors(env_one, env_two): with mock.patch("conda.cli.main_rename.install.clone") as clone_mock: clone_mock.side_effect = [CondaError(error_message)] _, err, exit_code = run( - f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_2} --force", disallow_stderr=False + f"conda rename -n {TEST_ENV_NAME_1} {TEST_ENV_NAME_2} --force", + disallow_stderr=False, ) assert error_message in err assert exit_code == 1 @@ -196,6 +212,7 @@ def test_rename_with_force_with_errors(env_one, env_two): (_, _, exit_code), _ = list_envs() assert exit_code is None + def test_rename_with_force_with_errors_prefix(env_prefix_one): """ Runs a test using --force flag while mocking an exception. @@ -207,7 +224,6 @@ def test_rename_with_force_with_errors_prefix(env_prefix_one): with mock.patch( "conda.cli.main_rename.install.clone" ) as clone_mock, tempfile.TemporaryDirectory() as tmpdir: - clone_mock.side_effect = [CondaError(error_message)] out, err, exit_code = run( f"conda rename -p {env_prefix_one} {tmpdir} --force", disallow_stderr=False diff --git a/tests/common/os/test_windows.py b/tests/common/os/test_windows.py index ba44959d511..3069c4e5037 100644 --- a/tests/common/os/test_windows.py +++ b/tests/common/os/test_windows.py @@ -1,9 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - -from conda.common.compat import on_win from conda.common._os.windows import is_admin_on_windows +from conda.common.compat import on_win def test_is_admin_on_windows(): diff --git a/tests/common/pkg_formats/test_python.py b/tests/common/pkg_formats/test_python.py index 70501392793..9fc31b3b552 100644 --- a/tests/common/pkg_formats/test_python.py +++ b/tests/common/pkg_formats/test_python.py @@ -2,21 +2,33 @@ # SPDX-License-Identifier: BSD-3-Clause """Test for python distribution information and metadata handling.""" -from errno import ENOENT import os +import tempfile +from errno import ENOENT from os.path import basename, lexists from pprint import pprint -import tempfile + +import pytest from conda.common.path import get_python_site_packages_short_path from conda.common.pkg_formats.python import ( - MetadataWarning, PySpec, PythonDistribution, PythonDistributionMetadata, - PythonEggInfoDistribution, PythonInstalledDistribution, get_default_marker_context, - get_dist_file_from_egg_link, get_site_packages_anchor_files, interpret, norm_package_name, - norm_package_version, parse_specification, pypi_name_to_conda_name, split_spec, + MetadataWarning, + PySpec, + PythonDistribution, + PythonDistributionMetadata, + PythonEggInfoDistribution, + PythonInstalledDistribution, + get_default_marker_context, + get_dist_file_from_egg_link, + get_site_packages_anchor_files, + interpret, + norm_package_name, + norm_package_version, + parse_specification, + pypi_name_to_conda_name, + split_spec, ) from conda.common.url import join_url -import pytest from tests.data.env_metadata import METADATA_VERSION_PATHS @@ -41,7 +53,7 @@ def _create_test_files(test_files): except Exception: pass - with open(fpath, 'w') as fh: + with open(fpath, "w") as fh: fh.write(content) fpaths.append(fpath) return temp_path, fpaths @@ -51,22 +63,22 @@ def _print_output(*args): """Helper function to print output in case of failed tests.""" for 
arg in args: print(arg) - print('\n') + print("\n") # Test module helper functions # ----------------------------------------------------------------------------- def test_norm_package_name(): test_names = ( - (None, ''), - ('', ''), - ('pyOpenssl', 'pyopenssl'), - ('py.Openssl', 'py-openssl'), - ('py-Openssl', 'py-openssl'), - ('py_Openssl', 'py-openssl'), - ('zope.interface', 'zope-interface'), + (None, ""), + ("", ""), + ("pyOpenssl", "pyopenssl"), + ("py.Openssl", "py-openssl"), + ("py-Openssl", "py-openssl"), + ("py_Openssl", "py-openssl"), + ("zope.interface", "zope-interface"), ) - for (name, expected_name) in test_names: + for name, expected_name in test_names: parsed_name = norm_package_name(name) _print_output(name, parsed_name, expected_name) assert parsed_name == expected_name @@ -74,11 +86,11 @@ def test_norm_package_name(): def test_pypi_name_to_conda_name(): test_cases = ( - (None, ''), - ('', ''), - ('graphviz', 'python-graphviz'), + (None, ""), + ("", ""), + ("graphviz", "python-graphviz"), ) - for (name, expected_name) in test_cases: + for name, expected_name in test_cases: parsed_name = pypi_name_to_conda_name(name) _print_output(name, parsed_name, expected_name) assert parsed_name == expected_name @@ -86,16 +98,16 @@ def test_pypi_name_to_conda_name(): def test_norm_package_version(): test_cases = ( - (None, ''), - ('', ''), - ('>=2', '>=2'), - ('(>=2)', '>=2'), - (' (>=2) ', '>=2'), - ('>=2,<3', '>=2,<3'), - ('>=2, <3', '>=2,<3'), - (' (>=2, <3) ', '>=2,<3'), + (None, ""), + ("", ""), + (">=2", ">=2"), + ("(>=2)", ">=2"), + (" (>=2) ", ">=2"), + (">=2,<3", ">=2,<3"), + (">=2, <3", ">=2,<3"), + (" (>=2, <3) ", ">=2,<3"), ) - for (version, expected_version) in test_cases: + for version, expected_version in test_cases: parsed_version = norm_package_version(version) _print_output(version, parsed_version, expected_version) assert parsed_version == expected_version @@ -104,13 +116,13 @@ def test_norm_package_version(): def test_split_spec(): test_cases = ( # spec, separator, (spec_start, spec_end) - ('', ';', ('', '')), - ('start;end', ';', ('start', 'end')), - ('start ; end', ';', ('start', 'end')), - (' start ; end ', ';', ('start', 'end')), - ('start@end', '@', ('start', 'end')), - ('start @ end', '@', ('start', 'end')), - (' start @ end ', '@', ('start', 'end')), + ("", ";", ("", "")), + ("start;end", ";", ("start", "end")), + ("start ; end", ";", ("start", "end")), + (" start ; end ", ";", ("start", "end")), + ("start@end", "@", ("start", "end")), + ("start @ end", "@", ("start", "end")), + (" start @ end ", "@", ("start", "end")), ) for spec, sep, expected_output in test_cases: output = split_spec(spec, sep) @@ -120,86 +132,104 @@ def test_split_spec(): def test_parse_specification(): test_reqs = { - '': - PySpec('', [], '', '', ''), - 'requests': - PySpec('requests', [], '', '', ''), - 'requests >1.1': - PySpec('requests', [], '>1.1', '', ''), - 'requests[security]': - PySpec('requests', ['security'], '', '', ''), - 'requests[security] (>=1.1.0)': - PySpec('requests', ['security'], '>=1.1.0', '', ''), - 'requests[security]>=1.5.0': - PySpec('requests', ['security'], '>=1.5.0', '', ''), - 'requests[security] (>=4.5.0) ; something >= 27': - PySpec('requests', ['security'], '>=4.5.0', 'something >= 27', ''), - 'requests[security]>=3.3.0;something >= 2.7 ': - PySpec('requests', ['security'], '>=3.3.0', 'something >= 2.7', ''), - 'requests[security]>=3.3.0;something >= 2.7 or something_else == 1': - PySpec('requests', ['security'], '>=3.3.0', 'something >= 2.7 or 
something_else == 1', ''), - 'requests[security] >=3.3.0 ; something >= 2.7 or something_else == 1': - PySpec('requests', ['security'], '>=3.3.0', 'something >= 2.7 or something_else == 1', ''), - 'requests[security] (>=3.3.0) ; something >= 2.7 or something_else == 1': - PySpec('requests', ['security'], '>=3.3.0', 'something >= 2.7 or something_else == 1', ''), - 'requests[security] (>=3.3.0<4.4) ; something >= 2.7 or something_else == 1': - PySpec('requests', ['security'], '>=3.3.0<4.4', 'something >= 2.7 or something_else == 1', ''), - 'pyOpenSSL>=0.14': - PySpec('pyopenssl', [], '>=0.14', '', ''), - 'py.OpenSSL>=0.14': - PySpec('py-openssl', [], '>=0.14', '', ''), - 'py-OpenSSL>=0.14': - PySpec('py-openssl', [], '>=0.14', '', ''), - 'py_OpenSSL>=0.14': - PySpec('py-openssl', [], '>=0.14', '', ''), - 'zope.interface (>3.5.0)': - PySpec('zope-interface', [], '>3.5.0', '', ''), - "A": - PySpec('a', [], '', '', ''), - "A.B-C_D": - PySpec('a-b-c-d', [], '', '', ''), - "aa": - PySpec('aa', [], '', '', ''), - "name": - PySpec('name', [], '', '', ''), - "name<=1": - PySpec('name', [], '<=1', '', ''), - "name>=3": - PySpec('name', [], '>=3', '', ''), - "name>=3,<2": - PySpec('name', [], '>=3,<2', '', ''), - " name ( >= 3, < 2 ) ": - PySpec('name', [], '>=3,<2', '', ''), - "name@http://foo.com": - PySpec('name', [], '', '', 'http://foo.com'), - " name [ fred , bar ] ( >= 3 , < 2 ) ": - PySpec('name', ['fred', 'bar'], '>=3,<2', '', ''), - " name [fred,bar] ( >= 3 , < 2 ) @ http://foo.com ; python_version=='2.7' ": - PySpec('name', ['fred', 'bar'], '>=3,<2', "python_version=='2.7'", 'http://foo.com'), - " name [fred,bar] @ http://foo.com ; python_version=='2.7' ": - PySpec('name', ['fred', 'bar'], '', "python_version=='2.7'", 'http://foo.com'), - "name[quux, strange];python_version<'2.7' and platform_version=='2'": - PySpec('name', ['quux', 'strange'], '', "python_version<'2.7' and platform_version=='2'", ''), - "name; os_name=='a' or os_name=='b'": - PySpec('name', [], '', "os_name=='a' or os_name=='b'", ''), - "name; os_name=='a' and os_name=='b' or os_name=='c'": - PySpec('name', [], '', "os_name=='a' and os_name=='b' or os_name=='c'", ''), - "name; os_name=='a' and (os_name=='b' or os_name=='c')": - PySpec('name', [], '', "os_name=='a' and (os_name=='b' or os_name=='c')", ''), - " name; os_name=='a' or os_name=='b' and os_name=='c' ": - PySpec('name', [], '', "os_name=='a' or os_name=='b' and os_name=='c'", ''), - " name ; (os_name=='a' or os_name=='b') and os_name=='c' ": - PySpec('name', [], '', "(os_name=='a' or os_name=='b') and os_name=='c'", ''), - '>=3,<2': - PySpec('', [], '>=3,<2', '', ''), - ' ( >=3 , <2 ) ': - PySpec('', [], '>=3,<2', '', ''), - '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*': - PySpec('', [], '>=2.7,!=3.0.*,!=3.1.*,!=3.2.*', '', ''), - 'name>=1.0.0-beta.1,<2.0.0': - PySpec('name', [], '>=1.0.0.beta.1,<2.0.0', '', ''), - 'name==1.0.0+localhash': - PySpec('name', [], '==1.0.0+localhash', '', ''), + "": PySpec("", [], "", "", ""), + "requests": PySpec("requests", [], "", "", ""), + "requests >1.1": PySpec("requests", [], ">1.1", "", ""), + "requests[security]": PySpec("requests", ["security"], "", "", ""), + "requests[security] (>=1.1.0)": PySpec( + "requests", ["security"], ">=1.1.0", "", "" + ), + "requests[security]>=1.5.0": PySpec( + "requests", ["security"], ">=1.5.0", "", "" + ), + "requests[security] (>=4.5.0) ; something >= 27": PySpec( + "requests", ["security"], ">=4.5.0", "something >= 27", "" + ), + "requests[security]>=3.3.0;something >= 2.7 ": PySpec( + "requests", 
["security"], ">=3.3.0", "something >= 2.7", "" + ), + "requests[security]>=3.3.0;something >= 2.7 or something_else == 1": PySpec( + "requests", + ["security"], + ">=3.3.0", + "something >= 2.7 or something_else == 1", + "", + ), + "requests[security] >=3.3.0 ; something >= 2.7 or something_else == 1": PySpec( + "requests", + ["security"], + ">=3.3.0", + "something >= 2.7 or something_else == 1", + "", + ), + "requests[security] (>=3.3.0) ; something >= 2.7 or something_else == 1": PySpec( + "requests", + ["security"], + ">=3.3.0", + "something >= 2.7 or something_else == 1", + "", + ), + "requests[security] (>=3.3.0<4.4) ; something >= 2.7 or something_else == 1": PySpec( + "requests", + ["security"], + ">=3.3.0<4.4", + "something >= 2.7 or something_else == 1", + "", + ), + "pyOpenSSL>=0.14": PySpec("pyopenssl", [], ">=0.14", "", ""), + "py.OpenSSL>=0.14": PySpec("py-openssl", [], ">=0.14", "", ""), + "py-OpenSSL>=0.14": PySpec("py-openssl", [], ">=0.14", "", ""), + "py_OpenSSL>=0.14": PySpec("py-openssl", [], ">=0.14", "", ""), + "zope.interface (>3.5.0)": PySpec("zope-interface", [], ">3.5.0", "", ""), + "A": PySpec("a", [], "", "", ""), + "A.B-C_D": PySpec("a-b-c-d", [], "", "", ""), + "aa": PySpec("aa", [], "", "", ""), + "name": PySpec("name", [], "", "", ""), + "name<=1": PySpec("name", [], "<=1", "", ""), + "name>=3": PySpec("name", [], ">=3", "", ""), + "name>=3,<2": PySpec("name", [], ">=3,<2", "", ""), + " name ( >= 3, < 2 ) ": PySpec("name", [], ">=3,<2", "", ""), + "name@http://foo.com": PySpec("name", [], "", "", "http://foo.com"), + " name [ fred , bar ] ( >= 3 , < 2 ) ": PySpec( + "name", ["fred", "bar"], ">=3,<2", "", "" + ), + " name [fred,bar] ( >= 3 , < 2 ) @ http://foo.com ; python_version=='2.7' ": PySpec( + "name", ["fred", "bar"], ">=3,<2", "python_version=='2.7'", "http://foo.com" + ), + " name [fred,bar] @ http://foo.com ; python_version=='2.7' ": PySpec( + "name", ["fred", "bar"], "", "python_version=='2.7'", "http://foo.com" + ), + "name[quux, strange];python_version<'2.7' and platform_version=='2'": PySpec( + "name", + ["quux", "strange"], + "", + "python_version<'2.7' and platform_version=='2'", + "", + ), + "name; os_name=='a' or os_name=='b'": PySpec( + "name", [], "", "os_name=='a' or os_name=='b'", "" + ), + "name; os_name=='a' and os_name=='b' or os_name=='c'": PySpec( + "name", [], "", "os_name=='a' and os_name=='b' or os_name=='c'", "" + ), + "name; os_name=='a' and (os_name=='b' or os_name=='c')": PySpec( + "name", [], "", "os_name=='a' and (os_name=='b' or os_name=='c')", "" + ), + " name; os_name=='a' or os_name=='b' and os_name=='c' ": PySpec( + "name", [], "", "os_name=='a' or os_name=='b' and os_name=='c'", "" + ), + " name ; (os_name=='a' or os_name=='b') and os_name=='c' ": PySpec( + "name", [], "", "(os_name=='a' or os_name=='b') and os_name=='c'", "" + ), + ">=3,<2": PySpec("", [], ">=3,<2", "", ""), + " ( >=3 , <2 ) ": PySpec("", [], ">=3,<2", "", ""), + ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*": PySpec( + "", [], ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*", "", "" + ), + "name>=1.0.0-beta.1,<2.0.0": PySpec( + "name", [], ">=1.0.0.beta.1,<2.0.0", "", "" + ), + "name==1.0.0+localhash": PySpec("name", [], "==1.0.0+localhash", "", ""), } for req, expected_req in test_reqs.items(): parsed_req = parse_specification(req) @@ -210,17 +240,17 @@ def test_parse_specification(): def test_get_site_packages_anchor_files(): test_cases_valid = ( # dir, filename, content - ('bar.dist-info', 'RECORD', ''), - ('foo.egg-info', 'PKG-INFO', ''), - ('', 'cheese.egg-info', ''), - ('', 
'spam.egg-link', ''), + ("bar.dist-info", "RECORD", ""), + ("foo.egg-info", "PKG-INFO", ""), + ("", "cheese.egg-info", ""), + ("", "spam.egg-link", ""), ) test_cases_invalid = ( - ('a.eggs', 'RECORD', ''), - ('b.eggs', 'PKG-INFO', ''), - ('', 'zoom.path', ''), - ('', 'zoom.pth', ''), - ('', 'something', ''), + ("a.eggs", "RECORD", ""), + ("b.eggs", "PKG-INFO", ""), + ("", "zoom.path", ""), + ("", "zoom.pth", ""), + ("", "something", ""), ) # Create test case dirs/files on temp folder @@ -232,7 +262,7 @@ def test_get_site_packages_anchor_files(): # Generate valid output expected_outputs = set() for folder, fname, content in test_cases_valid: - expected_output = '/'.join([ref_dir, folder, fname]).replace('//', '/') + expected_output = "/".join([ref_dir, folder, fname]).replace("//", "/") expected_outputs.add(expected_output) _print_output(outputs, expected_outputs) @@ -240,55 +270,51 @@ def test_get_dist_file_from_egg_link(): - test_files = ( - ('egg1.egg-info', 'PKG-INFO', ''), - ) + test_files = (("egg1.egg-info", "PKG-INFO", ""),) temp_path, fpaths = _create_test_files(test_files) - temp_path2, fpaths2 = _create_test_files((('', 'egg1.egg-link', temp_path),)) + temp_path2, fpaths2 = _create_test_files((("", "egg1.egg-link", temp_path),)) - output = get_dist_file_from_egg_link(fpaths2[0], '') + output = get_dist_file_from_egg_link(fpaths2[0], "") expected_output = fpaths[0] _print_output(output, expected_output) assert output == expected_output # Test not existing path - temp_path3, fpaths3 = _create_test_files((('', 'egg2.egg-link', '/not-a-path/'),)) + temp_path3, fpaths3 = _create_test_files((("", "egg2.egg-link", "/not-a-path/"),)) with pytest.raises(EnvironmentError) as exc: - get_dist_file_from_egg_link(fpaths3[0], '') + get_dist_file_from_egg_link(fpaths3[0], "") print(exc.value) # Test existing path but no valid egg-info files temp_path4 = tempfile.mkdtemp() - temp_path4, fpaths4 = _create_test_files((('', 'egg2.egg-link', temp_path4),)) + temp_path4, fpaths4 = _create_test_files((("", "egg2.egg-link", temp_path4),)) with pytest.raises(EnvironmentError) as exc: - get_dist_file_from_egg_link(fpaths4[0], '') + get_dist_file_from_egg_link(fpaths4[0], "") print(exc.value) @pytest.mark.skipif(True, reason="Ask @goanpeca about what this test is looking for.") def test_get_python_distribution_info(): - temp_path_egg1, _ = _create_test_files(( - ('', 'bar.egg-info', 'Name: bar\n'), - )) - temp_path_egg2, _ = _create_test_files(( - ('lee.egg-info', 'PKG-INFO', 'Name: lee\n'), - )) + temp_path_egg1, _ = _create_test_files((("", "bar.egg-info", "Name: bar\n"),)) + temp_path_egg2, _ = _create_test_files( + (("lee.egg-info", "PKG-INFO", "Name: lee\n"),) + ) test_files = ( # Egg link - ('', 'boom.egg-link', '/not-a-path/'), - ('', 'bar.egg-link', temp_path_egg1), - ('', 'lee.egg-link', temp_path_egg2), + ("", "boom.egg-link", "/not-a-path/"), + ("", "bar.egg-link", temp_path_egg1), + ("", "lee.egg-link", temp_path_egg2), # Dist info - ('spam.dist-info', 'METADATA', 'Name: spam\n'), - ('spam.dist-info', 'RECORD', ''), - ('spam.dist-info', 'INSTALLER', ''), + ("spam.dist-info", "METADATA", "Name: spam\n"), + ("spam.dist-info", "RECORD", ""), + ("spam.dist-info", "INSTALLER", ""), # Egg info - ('foo.egg-info', 'METADATA', 'Name: foo\n'), + ("foo.egg-info", "METADATA", "Name: foo\n"), # Direct file - ('', 'cheese.egg-info', 'Name: cheese\n'), + ("", "cheese.egg-info", "Name: cheese\n"), ) temp_path2, fpaths = _create_test_files(test_files) - output_names =
['boom', 'bar', 'lee', 'spam', 'spam', 'spam', 'foo', 'cheese'] + output_names = ["boom", "bar", "lee", "spam", "spam", "spam", "foo", "cheese"] for i, fpath in enumerate(fpaths): output = PythonDistribution.init(temp_path2, basename(fpath), "1.1") output = output.prefix_record @@ -306,14 +332,12 @@ def test_metadata_keys(): cls = PythonDistributionMetadata for keymap in cls.SINGLE_USE_KEYS, cls.MULTIPLE_USE_KEYS: for key, value in keymap.items(): - assert key.lower().replace('-', '_') == value + assert key.lower().replace("-", "_") == value def test_metadata_process_path(): - name = 'META' - test_files = ( - ('', name, 'Name: eggs\n'), - ) + name = "META" + test_files = (("", name, "Name: eggs\n"),) temp_path, fpaths = _create_test_files(test_files) func = PythonDistributionMetadata._process_path @@ -330,7 +354,7 @@ def test_metadata_process_path(): assert output == expected_output # Test valid directory (file order) - output = func(temp_path, ['something', name, 'something-else']) + output = func(temp_path, ["something", name, "something-else"]) expected_output = fpaths[0] _print_output(output, expected_output) assert output == expected_output @@ -346,25 +370,23 @@ def test_metadata_read_metadata(): func = PythonDistributionMetadata._read_metadata # Test existing file unknown key - temp_path, fpaths = _create_test_files(( - ('', 'PKG-INFO', 'Unknown-Key: unknown\n'), - )) + temp_path, fpaths = _create_test_files( + (("", "PKG-INFO", "Unknown-Key: unknown\n"),) + ) output = func(fpaths[0]) expected_output = {} _print_output(output, expected_output) assert output == expected_output # Test existing file known key - temp_path, fpaths = _create_test_files(( - ('', 'PKG-INFO', 'Name: spam\n'), - )) + temp_path, fpaths = _create_test_files((("", "PKG-INFO", "Name: spam\n"),)) output = func(fpaths[0]) expected_output = {"name": "spam"} _print_output(output, expected_output) assert output == expected_output # Test non existing file - test_fpath = '/foo/bar/METADATA' + test_fpath = "/foo/bar/METADATA" output = func(test_fpath) expected_output = {} _print_output(output, expected_output) @@ -393,20 +415,20 @@ def test_metadata(): version = meta.version _print_output(fpath, meta._data, a, b, c, d, e, f, name, version) assert len(meta._data) - assert name == 'BeagleVote' - assert version == '1.0a2' + assert name == "BeagleVote" + assert version == "1.0a2" # Python Distributions # ----------------------------------------------------------------------------- def test_basepydist_parse_requires_file_data(): - key = 'g' + key = "g" test_cases = ( # (data, requirements, extras) - ('', ([], [])), - ('foo\n', (['foo'], [])), - ('foo\n\n[:a == "a"]\nbar\n', (['foo', 'bar; a == "a"'], ['a'])), - ('foo\n\n[a]\nbar\n', (['foo', 'bar; extra == "a"'], ['a'])), + ("", ([], [])), + ("foo\n", (["foo"], [])), + ('foo\n\n[:a == "a"]\nbar\n', (["foo", 'bar; a == "a"'], ["a"])), + ("foo\n\n[a]\nbar\n", (["foo", 'bar; extra == "a"'], ["a"])), ) func = PythonDistribution._parse_requires_file_data @@ -418,7 +440,7 @@ def test_basepydist_parse_requires_file_data(): def test_basepydist_parse_entries_file_data(): func = PythonDistribution._parse_entries_file_data - data = ''' + data = """ [a] a = cli:main_1 @@ -427,7 +449,7 @@ def test_basepydist_parse_entries_file_data(): [b.d] C = cli:MAIN_3 -''' +""" expected_output = { "a": {"a": "cli:main_1"}, "b.c": {"b": "cli:MAIN_2"}, @@ -440,10 +462,10 @@ def test_basepydist_parse_entries_file_data(): def test_basepydist_load_requires_provides_file(): - temp_path, fpaths = 
_create_test_files((('', 'depends.txt', 'foo\n\n[a]\nbar\n'), )) + temp_path, fpaths = _create_test_files((("", "depends.txt", "foo\n\n[a]\nbar\n"),)) dist = PythonEggInfoDistribution(temp_path, "1.8", None) - exp_req, exp_extra = (['foo', 'bar; extra == "a"'], ['a']) + exp_req, exp_extra = (["foo", 'bar; extra == "a"'], ["a"]) req, extra = dist._load_requires_provides_file() _print_output((list(sorted(req)), extra), (list(sorted(exp_req)), exp_extra)) assert (list(sorted(req)), extra) == (list(sorted(exp_req)), exp_extra) @@ -451,14 +473,16 @@ def test_basepydist_load_requires_provides_file(): def test_dist_get_paths(): content = 'foo/bar,sha256=1,"45"\nfoo/spam,,\n' - temp_path, fpaths = _create_test_files((('', 'SOURCES.txt', content), )) + temp_path, fpaths = _create_test_files((("", "SOURCES.txt", content),)) sp_dir = get_python_site_packages_short_path("2.7") dist = PythonEggInfoDistribution(temp_path, "2.7", None) output = dist.get_paths() - expected_output = [(join_url(sp_dir, "foo", "bar"), '1', 45), - (join_url(sp_dir, "foo", "spam"), None, None)] + expected_output = [ + (join_url(sp_dir, "foo", "bar"), "1", 45), + (join_url(sp_dir, "foo", "spam"), None, None), + ] _print_output(output, expected_output) assert output == expected_output @@ -472,36 +496,36 @@ def test_dist_get_paths_no_paths(): def test_get_dist_requirements(): test_files = ( - ('', 'METADATA', 'Name: spam\n'), - ('', 'requires.txt', 'foo >1.0'), + ("", "METADATA", "Name: spam\n"), + ("", "requires.txt", "foo >1.0"), ) temp_path, fpaths = _create_test_files(test_files) dist = PythonEggInfoDistribution(temp_path, "2.7", None) output = dist.get_dist_requirements() - expected_output = frozenset({'foo >1.0'}) + expected_output = frozenset({"foo >1.0"}) _print_output(output, expected_output) assert output == expected_output def test_get_extra_provides(): test_files = ( - ('', 'METADATA', 'Name: spam\n'), - ('', 'requires.txt', 'foo >1.0\n[a]\nbar\n'), + ("", "METADATA", "Name: spam\n"), + ("", "requires.txt", "foo >1.0\n[a]\nbar\n"), ) temp_path, fpaths = _create_test_files(test_files) dist = PythonEggInfoDistribution(temp_path, "2.7", None) output = dist.get_extra_provides() - expected_output = ['a'] + expected_output = ["a"] _print_output(output, expected_output) assert output == expected_output def test_get_entry_points(): test_files = ( - ('', 'METADATA', 'Name: spam\n'), - ('', 'entry_points.txt', '[console_scripts]\ncheese = cli:main\n'), + ("", "METADATA", "Name: spam\n"), + ("", "entry_points.txt", "[console_scripts]\ncheese = cli:main\n"), ) temp_path, fpaths = _create_test_files(test_files) @@ -514,9 +538,9 @@ def test_get_entry_points(): def test_pydist_check_files(): test_files = ( - ('', 'METADATA', '1'), - ('', 'RECORD', '2'), - ('', 'INSTALLER', '3'), + ("", "METADATA", "1"), + ("", "RECORD", "2"), + ("", "INSTALLER", "3"), ) # Test mandatory files found @@ -532,13 +556,13 @@ def test_pydist_check_files(): def test_python_dist_info(): test_files = ( - ('', 'METADATA', ('Name: zoom\n' - 'Requires-Python: ==2.7\n' - 'Requires-External: C\n' - ) - ), - ('', 'RECORD', 'foo/bar,sha256=1,"45"\nfoo/spam,,\n'), - ('', 'INSTALLER', ''), + ( + "", + "METADATA", + ("Name: zoom\n" "Requires-Python: ==2.7\n" "Requires-External: C\n"), + ), + ("", "RECORD", 'foo/bar,sha256=1,"45"\nfoo/spam,,\n'), + ("", "INSTALLER", ""), ) # Test mandatory files found temp_path, fpaths = _create_test_files(test_files) @@ -547,62 +571,62 @@ def test_python_dist_info(): paths = dist.get_paths() _print_output(paths) assert len(paths) == 2 
- assert dist.get_python_requirements() == frozenset(['==2.7']) - assert dist.get_external_requirements() == frozenset(['C']) + assert dist.get_python_requirements() == frozenset(["==2.7"]) + assert dist.get_external_requirements() == frozenset(["C"]) def test_python_dist_info_conda_dependencies(): test_files = ( - ('', 'METADATA', ('Name: foo\n' - 'Requires-Python: >2.7,<5.0\n' - 'Requires-Dist: bar ; python_version == "2.7"\n' - 'Requires-Dist: spam ; python_version == "4.9"\n' - 'Provides-Extra: docs\n' - 'Requires-Dist: cheese >=1.0; extra == "docs"\n' - ) - ), + ( + "", + "METADATA", + ( + "Name: foo\n" + "Requires-Python: >2.7,<5.0\n" + 'Requires-Dist: bar ; python_version == "2.7"\n' + 'Requires-Dist: spam ; python_version == "4.9"\n' + "Provides-Extra: docs\n" + 'Requires-Dist: cheese >=1.0; extra == "docs"\n' + ), + ), ) temp_path, fpaths = _create_test_files(test_files) path = os.path.dirname(fpaths[0]) dist = PythonEggInfoDistribution(path, "4.9", None) depends, constrains = dist.get_conda_dependencies() - assert 'python 4.9.*' in depends - assert 'bar' not in depends - assert 'spam' in depends - assert 'cheese >=1.0' in constrains + assert "python 4.9.*" in depends + assert "bar" not in depends + assert "spam" in depends + assert "cheese >=1.0" in constrains dist = PythonEggInfoDistribution(path, "2.7", None) depends, constrains = dist.get_conda_dependencies() - assert 'python 2.7.*' in depends - assert 'bar' in depends - assert 'spam' not in depends - assert 'cheese >=1.0' in constrains + assert "python 2.7.*" in depends + assert "bar" in depends + assert "spam" not in depends + assert "cheese >=1.0" in constrains dist = PythonEggInfoDistribution(path, "3.4", None) depends, constrains = dist.get_conda_dependencies() - assert 'python 3.4.*' in depends - assert 'bar' not in depends - assert 'spam' not in depends - assert 'cheese >=1.0' in constrains + assert "python 3.4.*" in depends + assert "bar" not in depends + assert "spam" not in depends + assert "cheese >=1.0" in constrains def test_python_dist_info_conda_dependencies_2(): - test_files = ( - ('', 'METADATA', ('Name: foo\n')), - ) + test_files = (("", "METADATA", ("Name: foo\n")),) temp_path, fpaths = _create_test_files(test_files) path = os.path.dirname(fpaths[0]) dist = PythonEggInfoDistribution(path, "4.9", None) depends, constrains = dist.get_conda_dependencies() - assert 'python 4.9.*' in depends + assert "python 4.9.*" in depends def test_python_dist_info_conda_dependencies_3(): - test_files = ( - ('', 'METADATA', ('Name: foo\n')), - ) + test_files = (("", "METADATA", ("Name: foo\n")),) temp_path, fpaths = _create_test_files(test_files) path = os.path.dirname(fpaths[0]) @@ -612,9 +636,7 @@ def test_python_dist_info_conda_dependencies_3(): def test_python_dist_egg_path(): - test_files = ( - ('', 'installed-files.txt', 'foo/bar\nfoo/spam\n'), - ) + test_files = (("", "installed-files.txt", "foo/bar\nfoo/spam\n"),) temp_path, fpaths = _create_test_files(test_files) path = os.path.dirname(fpaths[0]) @@ -625,15 +647,13 @@ def test_python_dist_egg_path(): def test_python_dist_egg_fpath(): - test_files = ( - ('', 'zoom.egg-info', 'Name: Zoom\nVersion: 1.0\n'), - ) + test_files = (("", "zoom.egg-info", "Name: Zoom\nVersion: 1.0\n"),) temp_path, fpaths = _create_test_files(test_files) dist = PythonEggInfoDistribution(fpaths[0], "2.2", None) - assert dist.name == 'Zoom' - assert dist.norm_name == 'zoom' - assert dist.version == '1.0' + assert dist.name == "Zoom" + assert dist.norm_name == "zoom" + assert dist.version == "1.0" 
# Markers @@ -643,13 +663,13 @@ def test_evaluate_marker(): # ((marker_expr, context, extras, expected_output), ...) test_cases = ( # Valid context - ('spam == "1.0"', {'spam': '1.0'}, True), + ('spam == "1.0"', {"spam": "1.0"}, True), # Should parse as (a and b) or c - ("a=='a' and b=='b' or c=='c'", {'a': 'a', 'b': 'b', 'c': ''}, True), + ("a=='a' and b=='b' or c=='c'", {"a": "a", "b": "b", "c": ""}, True), # Overriding precedence -> a and (b or c) - ("a=='a' and (b=='b' or c=='c')", {'a': 'a', 'b': '', 'c': ''}, None), + ("a=='a' and (b=='b' or c=='c')", {"a": "a", "b": "", "c": ""}, None), # Overriding precedence -> (a or b) and c - ("(a=='a' or b=='b') and c=='c'", {'a': 'a', 'b': '', 'c': ''}, None), + ("(a=='a' or b=='b') and c=='c'", {"a": "a", "b": "", "c": ""}, None), ) for marker_expr, context, expected_output in test_cases: output = None @@ -663,9 +683,9 @@ def test_evaluate_marker(): # Test cases syntax error test_cases = ( ('spam == "1.0"', {}, None), - ('spam2 == "1.0"', {'spam': '1.0'}, None), + ('spam2 == "1.0"', {"spam": "1.0"}, None), # Malformed - ('spam2 = "1.0"', {'spam': '1.0'}, None), + ('spam2 = "1.0"', {"spam": "1.0"}, None), ) for marker_expr, context, expected_output in test_cases: with pytest.raises(SyntaxError): @@ -676,7 +696,7 @@ def test_get_default_marker_context(): context = get_default_marker_context() for key, val in context.items(): # Check deprecated keys have same value as new keys (. -> _) - if '.' in key: - other_val = context.get(key.replace('.', '_')) + if "." in key: + other_val = context.get(key.replace(".", "_")) _print_output(val, other_val) assert val == other_val diff --git a/tests/common/test_configuration.py b/tests/common/test_configuration.py index b0bdddd92e5..dff6e587e0e 100644 --- a/tests/common/test_configuration.py +++ b/tests/common/test_configuration.py @@ -1,36 +1,35 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +from os import environ, mkdir +from os.path import join +from shutil import rmtree +from tempfile import mkdtemp +from unittest import TestCase import pytest -from conda.common.io import env_var, env_vars +from pytest import raises from conda.auxlib.ish import dals from conda.common.configuration import ( Configuration, ConfigurationObject, + CustomValidationError, + InvalidTypeError, + MapParameter, MultiValidationError, ObjectParameter, ParameterFlag, ParameterLoader, PrimitiveParameter, - MapParameter, SequenceParameter, + ValidationError, YamlRawParameter, load_file_configs, - InvalidTypeError, - CustomValidationError, pretty_list, raise_errors, ) +from conda.common.io import env_var, env_vars from conda.common.serialize import yaml_round_trip_load -from conda.common.configuration import ValidationError -from os import environ, mkdir -from os.path import join -from pytest import raises -from shutil import rmtree -from tempfile import mkdtemp -from unittest import TestCase test_yaml_raw = { "file1": dals( @@ -275,21 +274,31 @@ class SampleConfiguration(Configuration): aliases=("always_yes_altname1", "yes", "always_yes_altname2"), ) changeps1 = ParameterLoader(PrimitiveParameter(True)) - proxy_servers = ParameterLoader(MapParameter(PrimitiveParameter("", element_type=str))) + proxy_servers = ParameterLoader( + MapParameter(PrimitiveParameter("", element_type=str)) + ) channels = ParameterLoader( SequenceParameter(PrimitiveParameter("", element_type=str)), aliases=("channels_altname",), ) always_an_int = ParameterLoader(PrimitiveParameter(0)) - boolean_map = 
ParameterLoader(MapParameter(PrimitiveParameter(False, element_type=bool))) + boolean_map = ParameterLoader( + MapParameter(PrimitiveParameter(False, element_type=bool)) + ) commented_map = ParameterLoader(MapParameter(PrimitiveParameter("", str))) - env_var_map = ParameterLoader(MapParameter(PrimitiveParameter("", str)), expandvars=True) + env_var_map = ParameterLoader( + MapParameter(PrimitiveParameter("", str)), expandvars=True + ) env_var_str = ParameterLoader(PrimitiveParameter(""), expandvars=True) - env_var_bool = ParameterLoader(PrimitiveParameter(False, element_type=bool), expandvars=True) + env_var_bool = ParameterLoader( + PrimitiveParameter(False, element_type=bool), expandvars=True + ) normal_str = ParameterLoader(PrimitiveParameter(""), expandvars=False) - env_var_list = ParameterLoader(SequenceParameter(PrimitiveParameter("", str)), expandvars=True) + env_var_list = ParameterLoader( + SequenceParameter(PrimitiveParameter("", str)), expandvars=True + ) nested_map = ParameterLoader( MapParameter(SequenceParameter(PrimitiveParameter("", element_type=str))) @@ -303,20 +312,26 @@ class SampleConfiguration(Configuration): def load_from_string_data(*seq): return { - f: YamlRawParameter.make_raw_parameters(f, yaml_round_trip_load(test_yaml_raw[f])) + f: YamlRawParameter.make_raw_parameters( + f, yaml_round_trip_load(test_yaml_raw[f]) + ) for f in seq } class ConfigurationTests(TestCase): def test_simple_merges_and_caching(self): - config = SampleConfiguration()._set_raw_data(load_from_string_data("file1", "file2")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("file1", "file2") + ) assert config.changeps1 is False assert config.always_yes is True assert config.channels == ("porky", "bugs", "elmer", "daffy", "tweety") assert config.proxy_servers == {"http": "marv", "https": "sam", "s3": "pepé"} - config = SampleConfiguration()._set_raw_data(load_from_string_data("file2", "file1")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("file2", "file1") + ) assert len(config._cache_) == 0 assert config.changeps1 is False assert len(config._cache_) == 1 @@ -440,7 +455,9 @@ def test_load_raw_configs(self): assert not_a_file not in raw_data assert raw_data[condarc]["channels"].value(None)[0].value(None) == "wile" assert raw_data[f1]["always_yes"].value(None) == "no" - assert raw_data[f2]["proxy_servers"].value(None)["http"].value(None) == "marv" + assert ( + raw_data[f2]["proxy_servers"].value(None)["http"].value(None) == "marv" + ) config = SampleConfiguration(search_path) @@ -466,14 +483,34 @@ def test_important_primitive_map_merges(self): config = SampleConfiguration()._set_raw_data(raw_data) assert config.changeps1 is False assert config.always_yes is True - assert config.channels == ("wile", "porky", "bugs", "elmer", "daffy", "foghorn", "tweety") - assert config.proxy_servers == {"http": "foghorn", "https": "sam", "s3": "porky"} + assert config.channels == ( + "wile", + "porky", + "bugs", + "elmer", + "daffy", + "foghorn", + "tweety", + ) + assert config.proxy_servers == { + "http": "foghorn", + "https": "sam", + "s3": "porky", + } raw_data = load_from_string_data("file3", "file2", "file1") config = SampleConfiguration()._set_raw_data(raw_data) assert config.changeps1 is False assert config.always_yes is True - assert config.channels == ("wile", "bugs", "daffy", "tweety", "porky", "elmer", "foghorn") + assert config.channels == ( + "wile", + "bugs", + "daffy", + "tweety", + "porky", + "elmer", + "foghorn", + ) assert config.proxy_servers == 
{"http": "foghorn", "https": "sly", "s3": "pepé"} raw_data = load_from_string_data("file4", "file3", "file1") @@ -492,7 +529,11 @@ def test_important_primitive_map_merges(self): config = SampleConfiguration()._set_raw_data(raw_data) assert config.changeps1 is False assert config.always_yes is True - assert config.proxy_servers == {"https": "daffy", "http": "foghorn", "s3": "porky"} + assert config.proxy_servers == { + "https": "daffy", + "http": "foghorn", + "s3": "porky", + } raw_data = load_from_string_data("file3", "file1") config = SampleConfiguration()._set_raw_data(raw_data) @@ -557,19 +598,29 @@ def test_list_merges(self): assert config.channels == ("marv", "sam", "pepé", "daffy") def test_validation(self): - config = SampleConfiguration()._set_raw_data(load_from_string_data("bad_boolean")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("bad_boolean") + ) raises(ValidationError, lambda: config.always_yes) - config = SampleConfiguration()._set_raw_data(load_from_string_data("too_many_aliases")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("too_many_aliases") + ) raises(ValidationError, lambda: config.always_yes) - config = SampleConfiguration()._set_raw_data(load_from_string_data("not_an_int")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("not_an_int") + ) raises(ValidationError, lambda: config.always_an_int) - config = SampleConfiguration()._set_raw_data(load_from_string_data("bad_boolean_map")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("bad_boolean_map") + ) raises(ValidationError, lambda: config.boolean_map) - config = SampleConfiguration()._set_raw_data(load_from_string_data("good_boolean_map")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("good_boolean_map") + ) assert config.boolean_map["a_true"] is True assert config.boolean_map["a_yes"] is True assert config.boolean_map["a_1"] is True @@ -584,7 +635,9 @@ def test_validate_all(self): config = SampleConfiguration()._set_raw_data(load_from_string_data("file1")) config.validate_configuration() - config = SampleConfiguration()._set_raw_data(load_from_string_data("bad_boolean_map")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("bad_boolean_map") + ) try: config.validate_configuration() except ValidationError as e: @@ -606,7 +659,11 @@ def test_map_parameter_must_be_map(self): proxy_servers: bad values """ ) - data = {"s1": YamlRawParameter.make_raw_parameters("s1", yaml_round_trip_load(string))} + data = { + "s1": YamlRawParameter.make_raw_parameters( + "s1", yaml_round_trip_load(string) + ) + } config = SampleConfiguration()._set_raw_data(data) raises(InvalidTypeError, config.validate_all) @@ -619,34 +676,50 @@ def test_config_resets(self): assert config.changeps1 is False def test_empty_map_parameter(self): - config = SampleConfiguration()._set_raw_data(load_from_string_data("bad_boolean_map")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("bad_boolean_map") + ) config.check_source("bad_boolean_map") def test_commented_map_parameter(self): - config = SampleConfiguration()._set_raw_data(load_from_string_data("commented_map")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("commented_map") + ) assert config.commented_map == {"key": "value"} def test_invalid_map_parameter(self): - data = {"s1": YamlRawParameter.make_raw_parameters("s1", {"proxy_servers": "blah"})} + data = { + "s1": 
YamlRawParameter.make_raw_parameters("s1", {"proxy_servers": "blah"}) + } config = SampleConfiguration()._set_raw_data(data) with raises(InvalidTypeError): config.proxy_servers def test_invalid_seq_parameter(self): - data = {"s1": YamlRawParameter.make_raw_parameters("s1", {"channels": "y_u_no_tuple"})} + data = { + "s1": YamlRawParameter.make_raw_parameters( + "s1", {"channels": "y_u_no_tuple"} + ) + } config = SampleConfiguration()._set_raw_data(data) with raises(InvalidTypeError): config.channels def test_expanded_variables(self): with env_vars({"EXPANDED_VAR": "itsexpanded", "BOOL_VAR": "True"}): - config = SampleConfiguration()._set_raw_data(load_from_string_data("env_vars")) + config = SampleConfiguration()._set_raw_data( + load_from_string_data("env_vars") + ) assert config.env_var_map["expanded"] == "itsexpanded" assert config.env_var_map["unexpanded"] == "$UNEXPANDED_VAR" assert config.env_var_str == "itsexpanded" assert config.env_var_bool is True assert config.normal_str == "$EXPANDED_VAR" - assert config.env_var_list == ("itsexpanded", "$UNEXPANDED_VAR", "regular_var") + assert config.env_var_list == ( + "itsexpanded", + "$UNEXPANDED_VAR", + "regular_var", + ) def test_nested(self): config = SampleConfiguration()._set_raw_data( diff --git a/tests/common/test_io.py b/tests/common/test_io.py index 95c4820e932..b713efa9550 100644 --- a/tests/common/test_io.py +++ b/tests/common/test_io.py @@ -1,11 +1,10 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - -from conda.common.io import attach_stderr_handler, captured, CaptureTarget +import sys from io import StringIO from logging import DEBUG, NOTSET, WARN, getLogger -import sys + +from conda.common.io import CaptureTarget, attach_stderr_handler, captured def test_captured(): @@ -47,7 +46,7 @@ def print_captured(*args, **kwargs): def test_attach_stderr_handler(): - name = 'abbacadabba' + name = "abbacadabba" logr = getLogger(name) assert len(logr.handlers) == 0 assert logr.level is NOTSET @@ -56,28 +55,28 @@ def test_attach_stderr_handler(): with captured() as c: attach_stderr_handler(WARN, name) - logr.warn('test message') + logr.warn("test message") logr.debug(debug_message) assert len(logr.handlers) == 1 - assert logr.handlers[0].name == 'stderr' + assert logr.handlers[0].name == "stderr" assert logr.handlers[0].level is WARN assert logr.level is NOTSET - assert c.stdout == '' - assert 'test message' in c.stderr + assert c.stdout == "" + assert "test message" in c.stderr assert debug_message not in c.stderr # round two, with debug with captured() as c: attach_stderr_handler(DEBUG, name) - logr.warn('test message') + logr.warn("test message") logr.debug(debug_message) - logr.info('info message') + logr.info("info message") assert len(logr.handlers) == 1 - assert logr.handlers[0].name == 'stderr' + assert logr.handlers[0].name == "stderr" assert logr.handlers[0].level is DEBUG assert logr.level is NOTSET - assert c.stdout == '' - assert 'test message' in c.stderr + assert c.stdout == "" + assert "test message" in c.stderr assert debug_message in c.stderr diff --git a/tests/common/test_iterators.py b/tests/common/test_iterators.py index 90418a32958..9b3dc6e5985 100644 --- a/tests/common/test_iterators.py +++ b/tests/common/test_iterators.py @@ -1,7 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from itertools import chain import warnings +from itertools import chain from conda.core.link import PrefixActionGroup @@ -69,16 +69,34 @@ def test_interleave(): from 
conda._vendor.toolz.itertoolz import interleave prefix_action_groups = { - "remove_menu_action_groups": PrefixActionGroup([1, 2], [], [], [], [], [], [], [], []), - "unlink_action_groups": PrefixActionGroup([], [3, 4], [], [], [], [], [], [], []), - "unregister_action_groups": PrefixActionGroup([], [], [5, 6], [], [], [], [], [], []), + "remove_menu_action_groups": PrefixActionGroup( + [1, 2], [], [], [], [], [], [], [], [] + ), + "unlink_action_groups": PrefixActionGroup( + [], [3, 4], [], [], [], [], [], [], [] + ), + "unregister_action_groups": PrefixActionGroup( + [], [], [5, 6], [], [], [], [], [], [] + ), "link_action_groups": PrefixActionGroup([], [], [], [7, 8], [], [], [], [], []), - "register_action_groups": PrefixActionGroup([], [], [], [], [9, 10], [], [], [], []), - "compile_action_groups": PrefixActionGroup([], [], [], [], [], [11, 12], [], [], []), - "make_menu_action_groups": PrefixActionGroup([], [], [], [], [], [], [13, 14], [], []), - "entry_point_action_groups": PrefixActionGroup([], [], [], [], [], [], [], [15, 16], []), - "prefix_record_groups": PrefixActionGroup([], [], [], [], [], [], [], [], [17, 18]), - "all": PrefixActionGroup(["a"], ["b"], ["c"], ["d"], ["e"], ["f"], ["g"], ["h"], ["i"]), + "register_action_groups": PrefixActionGroup( + [], [], [], [], [9, 10], [], [], [], [] + ), + "compile_action_groups": PrefixActionGroup( + [], [], [], [], [], [11, 12], [], [], [] + ), + "make_menu_action_groups": PrefixActionGroup( + [], [], [], [], [], [], [13, 14], [], [] + ), + "entry_point_action_groups": PrefixActionGroup( + [], [], [], [], [], [], [], [15, 16], [] + ), + "prefix_record_groups": PrefixActionGroup( + [], [], [], [], [], [], [], [], [17, 18] + ), + "all": PrefixActionGroup( + ["a"], ["b"], ["c"], ["d"], ["e"], ["f"], ["g"], ["h"], ["i"] + ), } # old style diff --git a/tests/common/test_path.py b/tests/common/test_path.py index 77f5ff94da9..61edfb1372b 100644 --- a/tests/common/test_path.py +++ b/tests/common/test_path.py @@ -1,11 +1,13 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - from logging import getLogger -from conda.common.path import (get_major_minor_version, missing_pyc_files, url_to_path, - win_path_backout) +from conda.common.path import ( + get_major_minor_version, + missing_pyc_files, + url_to_path, + win_path_backout, +) log = getLogger(__name__) @@ -20,23 +22,53 @@ def test_url_to_path_unix(): def test_url_to_path_windows_local(): assert url_to_path("file:///c|/WINDOWS/notepad.exe") == "c:/WINDOWS/notepad.exe" assert url_to_path("file:///C:/WINDOWS/notepad.exe") == "C:/WINDOWS/notepad.exe" - assert url_to_path("file://localhost/C|/WINDOWS/notepad.exe") == "C:/WINDOWS/notepad.exe" - assert url_to_path("file://localhost/c:/WINDOWS/notepad.exe") == "c:/WINDOWS/notepad.exe" + assert ( + url_to_path("file://localhost/C|/WINDOWS/notepad.exe") + == "C:/WINDOWS/notepad.exe" + ) + assert ( + url_to_path("file://localhost/c:/WINDOWS/notepad.exe") + == "c:/WINDOWS/notepad.exe" + ) assert url_to_path("C:\\Windows\\notepad.exe") == "C:\\Windows\\notepad.exe" - assert url_to_path("file:///C:/Program%20Files/Internet%20Explorer/iexplore.exe") == "C:/Program Files/Internet Explorer/iexplore.exe" - assert url_to_path("C:\\Program Files\\Internet Explorer\\iexplore.exe") == "C:\\Program Files\\Internet Explorer\\iexplore.exe" + assert ( + url_to_path("file:///C:/Program%20Files/Internet%20Explorer/iexplore.exe") + == "C:/Program Files/Internet Explorer/iexplore.exe" + ) + assert ( + url_to_path("C:\\Program Files\\Internet 
Explorer\\iexplore.exe") + == "C:\\Program Files\\Internet Explorer\\iexplore.exe" + ) def test_url_to_path_windows_unc(): - assert url_to_path("file://windowshost/windowshare/path") == "//windowshost/windowshare/path" - assert url_to_path("\\\\windowshost\\windowshare\\path") == "\\\\windowshost\\windowshare\\path" - assert url_to_path("file://windowshost\\windowshare\\path") == "//windowshost\\windowshare\\path" - assert url_to_path("file://\\\\machine\\shared_folder\\path\\conda") == "\\\\machine\\shared_folder\\path\\conda" + assert ( + url_to_path("file://windowshost/windowshare/path") + == "//windowshost/windowshare/path" + ) + assert ( + url_to_path("\\\\windowshost\\windowshare\\path") + == "\\\\windowshost\\windowshare\\path" + ) + assert ( + url_to_path("file://windowshost\\windowshare\\path") + == "//windowshost\\windowshare\\path" + ) + assert ( + url_to_path("file://\\\\machine\\shared_folder\\path\\conda") + == "\\\\machine\\shared_folder\\path\\conda" + ) def test_win_path_backout(): - assert win_path_backout("file://\\\\machine\\shared_folder\\path\\conda") == "file://machine/shared_folder/path/conda" - assert win_path_backout("file://\\\\machine\\shared\\ folder\\path\\conda") == "file://machine/shared\\ folder/path/conda" + assert ( + win_path_backout("file://\\\\machine\\shared_folder\\path\\conda") + == "file://machine/shared_folder/path/conda" + ) + assert ( + win_path_backout("file://\\\\machine\\shared\\ folder\\path\\conda") + == "file://machine/shared\\ folder/path/conda" + ) FILES = ( @@ -62,7 +94,7 @@ def test_win_path_backout(): def test_missing_pyc_files_27(): - missing = missing_pyc_files('27', FILES) + missing = missing_pyc_files("27", FILES) assert len(missing) == 10 assert tuple(m[1] for m in missing) == ( "lib/python2.7/site-packages/flask/__init__.pyc", @@ -79,7 +111,7 @@ def test_missing_pyc_files_27(): def test_missing_pyc_files_34(): - missing = missing_pyc_files('34', FILES) + missing = missing_pyc_files("34", FILES) assert len(missing) == 10 assert tuple(m[1] for m in missing) == ( "lib/python2.7/site-packages/flask/__pycache__/__init__.cpython-34.pyc", @@ -96,7 +128,7 @@ def test_missing_pyc_files_34(): def test_missing_pyc_files_35(): - missing = missing_pyc_files('35', FILES) + missing = missing_pyc_files("35", FILES) assert len(missing) == 10 assert tuple(m[1] for m in missing) == ( "lib/python2.7/site-packages/flask/__pycache__/__init__.cpython-35.pyc", diff --git a/tests/common/test_url.py b/tests/common/test_url.py index 238b8b459c6..9343a6a32b0 100644 --- a/tests/common/test_url.py +++ b/tests/common/test_url.py @@ -1,22 +1,20 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - from logging import getLogger from typing import NamedTuple, Union import pytest from conda.common.url import ( + Url, add_username_and_password, is_ip_address, is_ipv6_address, is_url, maybe_add_auth, split_scheme_auth_token, - urlparse, - Url, url_to_s3_info, + urlparse, ) log = getLogger(__name__) @@ -43,7 +41,10 @@ def test_maybe_add_auth(): def test_add_username_and_pass_to_url(): url = "http://www.conda.io:80/some/path.html?query1=1&query2=2" new_url = add_username_and_password(url, "usr", "some*/weird pass") - assert new_url == "http://usr:some%2A%2Fweird%20pass@www.conda.io:80/some/path.html?query1=1&query2=2" + assert ( + new_url + == "http://usr:some%2A%2Fweird%20pass@www.conda.io:80/some/path.html?query1=1&query2=2" + ) def test_is_url(): @@ -59,17 +60,17 @@ def test_is_url(): def test_is_ipv6_address(): - assert 
is_ipv6_address('::1') is True - assert is_ipv6_address('2001:db8:85a3::370:7334') is True - assert is_ipv6_address('1234:'*7+'1234') is True - assert is_ipv6_address('192.168.10.10') is False - assert is_ipv6_address('1234:' * 8 + '1234') is False + assert is_ipv6_address("::1") is True + assert is_ipv6_address("2001:db8:85a3::370:7334") is True + assert is_ipv6_address("1234:" * 7 + "1234") is True + assert is_ipv6_address("192.168.10.10") is False + assert is_ipv6_address("1234:" * 8 + "1234") is False def test_is_ip_address(): - assert is_ip_address('192.168.10.10') is True - assert is_ip_address('::1') is True - assert is_ip_address('www.google.com') is False + assert is_ip_address("192.168.10.10") is True + assert is_ip_address("::1") is True + assert is_ip_address("www.google.com") is False class UrlTest(NamedTuple): @@ -137,7 +138,10 @@ def test_urlparse(test_url_str, exp_url_obj): "https://user:pass@conda.io/path/to/somewhere", ), (UrlTest(scheme="file", path="/opt/happy/path"), "file:///opt/happy/path"), - (UrlTest(scheme="file", path="path/to/something.txt"), "file:///path/to/something.txt"), + ( + UrlTest(scheme="file", path="path/to/something.txt"), + "file:///path/to/something.txt", + ), ] diff --git a/tests/common/test_yaml.py b/tests/common/test_yaml.py index 64f9c237bb9..364737083ab 100644 --- a/tests/common/test_yaml.py +++ b/tests/common/test_yaml.py @@ -1,7 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - from logging import getLogger from conda.auxlib.ish import dals @@ -11,25 +9,30 @@ def test_dump(): - obj = dict([ - ('a_seq', [1, 2, 3]), - ('a_map', {'a_key': 'a_value'}), - ]) + obj = dict( + [ + ("a_seq", [1, 2, 3]), + ("a_map", {"a_key": "a_value"}), + ] + ) assert obj == yaml_round_trip_load(yaml_round_trip_dump(obj)) def test_seq_simple(): - test_string = dals(""" + test_string = dals( + """ a_seq: - 1 - 2 - 3 - """) - assert test_string == yaml_round_trip_dump({'a_seq': [1, 2, 3]}) + """ + ) + assert test_string == yaml_round_trip_dump({"a_seq": [1, 2, 3]}) def test_yaml_complex(): - test_string = dals(""" + test_string = dals( + """ single_bool: false single_str: no @@ -50,24 +53,25 @@ def test_yaml_complex(): field2: yes # final comment - """) + """ + ) python_structure = { - 'single_bool': False, - 'single_str': 'no', - 'a_seq_1': [ + "single_bool": False, + "single_str": "no", + "a_seq_1": [ 1, 2, 3, ], - 'a_seq_2': [ + "a_seq_2": [ 1, - {'two': 2}, + {"two": 2}, 3, ], - 'a_map': { - 'field1': True, - 'field2': 'yes', + "a_map": { + "field1": True, + "field2": "yes", }, } @@ -80,8 +84,10 @@ def test_yaml_complex(): def test_map(): - test_string = dals(""" + test_string = dals( + """ a_map: a_key: a_value - """) - assert test_string == yaml_round_trip_dump({'a_map': {'a_key': 'a_value'}}) + """ + ) + assert test_string == yaml_round_trip_dump({"a_map": {"a_key": "a_value"}}) diff --git a/tests/conda_env/__init__.py b/tests/conda_env/__init__.py index 2978dd012db..4de129cfc51 100644 --- a/tests/conda_env/__init__.py +++ b/tests/conda_env/__init__.py @@ -1,9 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from os.path import dirname, join - # remote=True is only used in two places, in tests.conda_env.test_create diff --git a/tests/conda_env/installers/test_pip.py b/tests/conda_env/installers/test_pip.py index a638b622819..d07f3917b40 100644 --- a/tests/conda_env/installers/test_pip.py +++ b/tests/conda_env/installers/test_pip.py @@ -1,17 +1,15 @@ # Copyright (C) 2012 Anaconda, Inc # 
SPDX-License-Identifier: BSD-3-Clause - +import os import unittest from unittest import mock -import os -from conda_env.installers import pip from conda.exceptions import CondaValueError +from conda_env.installers import pip class PipInstallerTest(unittest.TestCase): def test_straight_install(self): - # To check that the correct file would be written written_deps = [] @@ -19,32 +17,39 @@ def log_write(text): written_deps.append(text) return mock.DEFAULT - with mock.patch.object(pip.subprocess, 'Popen') as mock_popen, \ - mock.patch.object(pip, 'pip_args') as mock_pip_args, \ - mock.patch('tempfile.NamedTemporaryFile', mock.mock_open()) as mock_namedtemp: + with mock.patch.object( + pip.subprocess, "Popen" + ) as mock_popen, mock.patch.object( + pip, "pip_args" + ) as mock_pip_args, mock.patch( + "tempfile.NamedTemporaryFile", mock.mock_open() + ) as mock_namedtemp: # Mock mock_popen.return_value.returncode = 0 - mock_pip_args.return_value = (['pip'], '9.0.1') + mock_pip_args.return_value = (["pip"], "9.0.1") mock_namedtemp.return_value.write.side_effect = log_write - mock_namedtemp.return_value.name = 'tmp-file' + mock_namedtemp.return_value.name = "tmp-file" args = mock.Mock() - root_dir = '/whatever' if os.name != 'nt' else 'C:\\whatever' - args.file = os.path.join(root_dir, 'environment.yml') + root_dir = "/whatever" if os.name != "nt" else "C:\\whatever" + args.file = os.path.join(root_dir, "environment.yml") # Run - pip.install('/some/prefix', ['foo', '-e ./bar'], args) + pip.install("/some/prefix", ["foo", "-e ./bar"], args) # Check expectations - mock_popen.assert_called_with(['pip', 'install', '-r', 'tmp-file'], - cwd=root_dir, - universal_newlines=True) + mock_popen.assert_called_with( + ["pip", "install", "-r", "tmp-file"], + cwd=root_dir, + universal_newlines=True, + ) self.assertEqual(1, mock_popen.return_value.communicate.call_count) - self.assertEqual(written_deps, ['foo\n-e ./bar']) + self.assertEqual(written_deps, ["foo\n-e ./bar"]) def test_stops_on_exception(self): - with mock.patch.object(pip.subprocess, 'Popen') as popen: + with mock.patch.object(pip.subprocess, "Popen") as popen: popen.return_value.returncode = 22 - with mock.patch.object(pip, 'pip_args') as pip_args: + with mock.patch.object(pip, "pip_args") as pip_args: # make sure that installed doesn't bail early - pip_args.return_value = (['pip'], '9.0.1') + pip_args.return_value = (["pip"], "9.0.1") - self.assertRaises(CondaValueError, pip.install, - '/some/prefix', ['foo'], None) + self.assertRaises( + CondaValueError, pip.install, "/some/prefix", ["foo"], None + ) diff --git a/tests/conda_env/specs/test_base.py b/tests/conda_env/specs/test_base.py index 8ccb3d9766d..27c79e644e3 100644 --- a/tests/conda_env/specs/test_base.py +++ b/tests/conda_env/specs/test_base.py @@ -1,16 +1,14 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from contextlib import contextmanager import random import types import unittest +from contextlib import contextmanager from unittest import mock from conda.exceptions import SpecNotFound from conda_env import specs - true_func = mock.Mock(return_value=True) false_func = mock.Mock(return_value=False) @@ -44,7 +42,7 @@ def test_dispatches_to_registered_specs(self): def test_passes_kwargs_to_all_specs(self): random_kwargs = { "foo": random.randint(100, 200), - "bar%d" % random.randint(100, 200): True + "bar%d" % random.randint(100, 200): True, } spec1, spec2 = generate_two_specs() @@ -55,17 +53,17 @@ def test_passes_kwargs_to_all_specs(self): def 
test_raises_exception_if_no_detection(self): spec1 = generate_two_specs()[0] - spec1.msg = 'msg' + spec1.msg = "msg" with patched_specs(spec1): with self.assertRaises(SpecNotFound): specs.detect(name="foo") def test_has_build_msg_function(self): - self.assertTrue(hasattr(specs, 'build_message')) + self.assertTrue(hasattr(specs, "build_message")) self.assertIsInstance(specs.build_message, types.FunctionType) def test_build_msg(self): - spec3 = mock.Mock(msg='error 3') - spec4 = mock.Mock(msg='error 4') + spec3 = mock.Mock(msg="error 3") + spec4 = mock.Mock(msg="error 4") spec5 = mock.Mock(msg=None) - self.assertEqual(specs.build_message([spec3, spec4, spec5]), 'error 3\nerror 4') + self.assertEqual(specs.build_message([spec3, spec4, spec5]), "error 3\nerror 4") diff --git a/tests/conda_env/specs/test_binstar.py b/tests/conda_env/specs/test_binstar.py index 6ed72b6ef4f..c31424a11b2 100644 --- a/tests/conda_env/specs/test_binstar.py +++ b/tests/conda_env/specs/test_binstar.py @@ -1,10 +1,10 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from binstar_client.errors import NotFound from pytest_mock import MockerFixture -from binstar_client.errors import NotFound -from conda_env.specs.binstar import BinstarSpec from conda_env.env import Environment +from conda_env.specs.binstar import BinstarSpec def test_name_not_present(): @@ -28,7 +28,9 @@ def test_package_not_exist(mocker: MockerFixture): mocker.patch( "conda_env.specs.binstar.BinstarSpec.binstar", new_callable=mocker.PropertyMock, - return_value=mocker.MagicMock(package=mocker.MagicMock(side_effect=NotFound("msg"))), + return_value=mocker.MagicMock( + package=mocker.MagicMock(side_effect=NotFound("msg")) + ), ) spec = BinstarSpec("darth/no-exist") @@ -41,7 +43,9 @@ def test_package_without_environment_file(mocker: MockerFixture): mocker.patch( "conda_env.specs.binstar.BinstarSpec.binstar", new_callable=mocker.PropertyMock, - return_value=mocker.MagicMock(package=mocker.MagicMock(return_value={"files": []})), + return_value=mocker.MagicMock( + package=mocker.MagicMock(return_value={"files": []}) + ), ) spec = BinstarSpec("darth/no-env-file") @@ -57,7 +61,9 @@ def test_download_environment(mocker: MockerFixture): return_value=mocker.MagicMock( package=mocker.MagicMock( return_value={ - "files": [{"type": "env", "version": "1", "basename": "environment.yml"}], + "files": [ + {"type": "env", "version": "1", "basename": "environment.yml"} + ], }, ), download=mocker.MagicMock(return_value=mocker.MagicMock(text="name: env")), @@ -80,9 +86,21 @@ def test_environment_version_sorting(mocker: MockerFixture): package=mocker.MagicMock( return_value={ "files": [ - {"type": "env", "version": "0.1.1", "basename": "environment.yml"}, - {"type": "env", "version": "0.1a.2", "basename": "environment.yml"}, - {"type": "env", "version": "0.2.0", "basename": "environment.yml"}, + { + "type": "env", + "version": "0.1.1", + "basename": "environment.yml", + }, + { + "type": "env", + "version": "0.1a.2", + "basename": "environment.yml", + }, + { + "type": "env", + "version": "0.2.0", + "basename": "environment.yml", + }, ], }, ), diff --git a/tests/conda_env/specs/test_requirements.py b/tests/conda_env/specs/test_requirements.py index 42861db45a0..62485ef9fad 100644 --- a/tests/conda_env/specs/test_requirements.py +++ b/tests/conda_env/specs/test_requirements.py @@ -1,31 +1,27 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import unittest -from .. 
import support_file - from conda_env import env from conda_env.specs.requirements import RequirementsSpec +from .. import support_file + class TestRequiremets(unittest.TestCase): def test_no_environment_file(self): - spec = RequirementsSpec(name=None, filename='not-a-file') + spec = RequirementsSpec(name=None, filename="not-a-file") self.assertEqual(spec.can_handle(), False) def test_no_name(self): - spec = RequirementsSpec(filename=support_file('requirements.txt')) + spec = RequirementsSpec(filename=support_file("requirements.txt")) self.assertEqual(spec.can_handle(), False) def test_req_file_and_name(self): - spec = RequirementsSpec(filename=support_file('requirements.txt'), name='env') + spec = RequirementsSpec(filename=support_file("requirements.txt"), name="env") self.assertTrue(spec.can_handle()) def test_environment(self): - spec = RequirementsSpec(filename=support_file('requirements.txt'), name='env') + spec = RequirementsSpec(filename=support_file("requirements.txt"), name="env") self.assertIsInstance(spec.environment, env.Environment) - self.assertEqual( - spec.environment.dependencies['conda'][0], - 'flask==0.10.1' - ) + self.assertEqual(spec.environment.dependencies["conda"][0], "flask==0.10.1") diff --git a/tests/conda_env/specs/test_yaml_file.py b/tests/conda_env/specs/test_yaml_file.py index cc8f56dc8cd..c1b173ace5a 100644 --- a/tests/conda_env/specs/test_yaml_file.py +++ b/tests/conda_env/specs/test_yaml_file.py @@ -1,8 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -import unittest import random +import unittest from unittest import mock from conda_env import env @@ -11,23 +10,23 @@ class TestYAMLFile(unittest.TestCase): def test_no_environment_file(self): - spec = YamlFileSpec(name=None, filename='not-a-file') + spec = YamlFileSpec(name=None, filename="not-a-file") self.assertEqual(spec.can_handle(), False) def test_environment_file_exist(self): - with mock.patch.object(env, 'from_file', return_value={}): - spec = YamlFileSpec(name=None, filename='environment.yaml') + with mock.patch.object(env, "from_file", return_value={}): + spec = YamlFileSpec(name=None, filename="environment.yaml") self.assertTrue(spec.can_handle()) def test_get_environment(self): r = random.randint(100, 200) - with mock.patch.object(env, 'from_file', return_value=r): - spec = YamlFileSpec(name=None, filename='environment.yaml') + with mock.patch.object(env, "from_file", return_value=r): + spec = YamlFileSpec(name=None, filename="environment.yaml") self.assertEqual(spec.environment, r) def test_filename(self): filename = f"filename_{random.randint(100, 200)}" - with mock.patch.object(env, 'from_file') as from_file: + with mock.patch.object(env, "from_file") as from_file: spec = YamlFileSpec(filename=filename) spec.environment from_file.assert_called_with(filename) diff --git a/tests/conda_env/support/advanced-pip/module_to_install_in_editable_mode/setup.py b/tests/conda_env/support/advanced-pip/module_to_install_in_editable_mode/setup.py index 773fcba2d4d..ea81c4fecab 100644 --- a/tests/conda_env/support/advanced-pip/module_to_install_in_editable_mode/setup.py +++ b/tests/conda_env/support/advanced-pip/module_to_install_in_editable_mode/setup.py @@ -1,9 +1,8 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from setuptools import setup setup( - name='module_to_install_in_editable_mode', + name="module_to_install_in_editable_mode", packages=[], ) diff --git a/tests/conda_env/test_cli.py b/tests/conda_env/test_cli.py index 
3ebc39d203c..e5721d0c2df 100644 --- a/tests/conda_env/test_cli.py +++ b/tests/conda_env/test_cli.py @@ -1,12 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json import os import tempfile +import unittest import pytest -import unittest from conda.auxlib.compat import Utf8NamedTemporaryFile from conda.base.constants import ROOT_ENV_NAME @@ -16,15 +15,16 @@ from conda.common.serialize import yaml_safe_load from conda.core.envs_manager import list_all_known_prefixes from conda.exceptions import ( - EnvironmentLocationNotFound, CondaEnvException, EnvironmentFileExtensionNotValid, EnvironmentFileNotFound, + EnvironmentLocationNotFound, SpecNotFound, ) from conda.gateways.disk.delete import rm_rf from conda.utils import massage_arguments -from conda_env.cli.main import create_parser, do_call as do_call_conda_env +from conda_env.cli.main import create_parser +from conda_env.cli.main import do_call as do_call_conda_env # Environment names we use during our tests TEST_ENV_NAME_1 = "env-1" @@ -110,7 +110,7 @@ def escape_for_winpath(p): if p: - return p.replace('\\', '\\\\') + return p.replace("\\", "\\\\") class Commands: @@ -172,7 +172,7 @@ def run_conda_command(command, prefix, *arguments): prefix = escape_for_winpath(prefix) if arguments: arguments = list(map(escape_for_winpath, arguments)) - if command is Commands.INFO: # INFO + if command is Commands.INFO: # INFO command_line = "{} {}".format(command, " ".join(arguments)) elif command is Commands.LIST: # LIST command_line = "{} -n {} {}".format(command, prefix, " ".join(arguments)) @@ -180,6 +180,7 @@ def run_conda_command(command, prefix, *arguments): command_line = "{} -y -q -n {} {}".format(command, prefix, " ".join(arguments)) from conda.auxlib.compat import shlex_split_unicode + commands = shlex_split_unicode(command_line) args = p.parse_args(commands) context._set_argparse_args(args) @@ -189,18 +190,17 @@ def run_conda_command(command, prefix, *arguments): return c.stdout, c.stderr -def create_env(content, filename='environment.yml'): - with open(filename, 'w') as fenv: +def create_env(content, filename="environment.yml"): + with open(filename, "w") as fenv: fenv.write(content) -def remove_env_file(filename='environment.yml'): +def remove_env_file(filename="environment.yml"): os.remove(filename) @pytest.mark.integration class IntegrationTests(unittest.TestCase): - def setUp(self): rm_rf("environment.yml") run_env_command(Commands.ENV_REMOVE, TEST_ENV_NAME_1) @@ -218,22 +218,22 @@ def tearDown(self): run_env_command(Commands.ENV_REMOVE, f"envjson-{env_nb}") def test_conda_env_create_no_file(self): - ''' + """ Test `conda env create` without an environment.yml file Should fail - ''' + """ try: run_env_command(Commands.ENV_CREATE, None) except Exception as e: self.assertIsInstance(e, EnvironmentFileNotFound) def test_conda_env_create_no_existent_file(self): - ''' + """ Test `conda env create --file=not_a_file.txt` with a file that does not exist. - ''' + """ try: - run_env_command(Commands.ENV_CREATE, None, '--file', 'not_a_file.txt') + run_env_command(Commands.ENV_CREATE, None, "--file", "not_a_file.txt") except Exception as e: self.assertIsInstance(e, EnvironmentFileNotFound) @@ -243,7 +243,9 @@ def test_conda_env_create_no_existent_file_with_name(self): exist. 
""" try: - run_env_command(Commands.ENV_CREATE, None, "--file", "not_a_file.txt", "-n" "foo") + run_env_command( + Commands.ENV_CREATE, None, "--file", "not_a_file.txt", "-n" "foo" + ) except Exception as e: self.assertIsInstance(e, EnvironmentFileNotFound) @@ -253,7 +255,7 @@ def test_create_valid_remote_env(self): This tests the `remote_origin` command line argument. """ - run_env_command(Commands.ENV_CREATE, None, 'conda-test/env-42') + run_env_command(Commands.ENV_CREATE, None, "conda-test/env-42") self.assertTrue(env_is_created(TEST_ENV_NAME_42)) o, e = run_conda_command(Commands.INFO, None, "--json") @@ -264,10 +266,10 @@ def test_create_valid_remote_env(self): ) def test_create_valid_env(self): - ''' + """ Creates an environment.yml file and creates and environment with it - ''' + """ create_env(ENVIRONMENT_1) run_env_command(Commands.ENV_CREATE, None) @@ -281,7 +283,7 @@ def test_create_valid_env(self): def test_create_dry_run_yaml(self): create_env(ENVIRONMENT_1) - o, e = run_env_command(Commands.ENV_CREATE, None, '--dry-run') + o, e = run_env_command(Commands.ENV_CREATE, None, "--dry-run") self.assertFalse(env_is_created(TEST_ENV_NAME_1)) # Find line where the YAML output starts (stdout might change if plugins involved) @@ -292,34 +294,44 @@ def test_create_dry_run_yaml(self): else: pytest.fail("Didn't find YAML data in output") - output = yaml_safe_load('\n'.join(lines[lineno:])) - assert output['name'] == 'env-1' - assert len(output['dependencies']) > 0 + output = yaml_safe_load("\n".join(lines[lineno:])) + assert output["name"] == "env-1" + assert len(output["dependencies"]) > 0 def test_create_dry_run_json(self): create_env(ENVIRONMENT_1) - o, e = run_env_command(Commands.ENV_CREATE, None, '--dry-run', '--json') + o, e = run_env_command(Commands.ENV_CREATE, None, "--dry-run", "--json") self.assertFalse(env_is_created(TEST_ENV_NAME_1)) output = json.loads(o) - assert output.get('name') == 'env-1' - assert len(output['dependencies']) + assert output.get("name") == "env-1" + assert len(output["dependencies"]) def test_create_valid_env_with_variables(self): - ''' + """ Creates an environment.yml file and creates and environment with it - ''' + """ create_env(ENVIRONMENT_1_WITH_VARIABLES) run_env_command(Commands.ENV_CREATE, None) self.assertTrue(env_is_created(TEST_ENV_NAME_1)) o, e = run_env_command( - Commands.ENV_CONFIG, TEST_ENV_NAME_1, "vars", "list", "--json", "-n", TEST_ENV_NAME_1 + Commands.ENV_CONFIG, + TEST_ENV_NAME_1, + "vars", + "list", + "--json", + "-n", + TEST_ENV_NAME_1, ) output_env_vars = json.loads(o) - assert output_env_vars == {'DUDE': 'woah', "SWEET": "yaaa", "API_KEY": "AaBbCcDd===EeFf"} + assert output_env_vars == { + "DUDE": "woah", + "SWEET": "yaaa", + "API_KEY": "AaBbCcDd===EeFf", + } o, e = run_conda_command(Commands.INFO, None, "--json") parsed = json.loads(o) @@ -341,9 +353,9 @@ def test_conda_env_create_empty_file(self): @pytest.mark.integration def test_conda_env_create_http(self): - ''' + """ Test `conda env create --file=https://some-website.com/environment.yml` - ''' + """ run_env_command( Commands.ENV_CREATE, None, @@ -372,7 +384,7 @@ def test_name(self): Test that --name can overide the `name` key inside an environment.yml """ create_env(ENVIRONMENT_1) - env_name = 'smoke-gh-254' + env_name = "smoke-gh-254" # It might be the case that you need to run this test more than once! 
try: @@ -381,8 +393,7 @@ def test_name(self): pass try: - run_env_command(Commands.ENV_CREATE, 'environment.yml', "-n", - env_name) + run_env_command(Commands.ENV_CREATE, "environment.yml", "-n", env_name) except Exception as e: print(e) @@ -390,7 +401,7 @@ def test_name(self): parsed = json.loads(o) self.assertNotEqual( - len([env for env in parsed['envs'] if env.endswith(env_name)]), 0 + len([env for env in parsed["envs"] if env.endswith(env_name)]), 0 ) def test_create_valid_env_json_output(self): @@ -399,7 +410,9 @@ def test_create_valid_env_json_output(self): Check the json output """ create_env(ENVIRONMENT_1) - stdout, stderr = run_env_command(Commands.ENV_CREATE, "envjson-1", "--quiet", "--json") + stdout, stderr = run_env_command( + Commands.ENV_CREATE, "envjson-1", "--quiet", "--json" + ) output = json.loads(stdout) assert output["success"] is True assert len(output["actions"]["LINK"]) > 0 @@ -411,7 +424,9 @@ def test_create_valid_env_with_conda_and_pip_json_output(self): Check the json output """ create_env(ENVIRONMENT_PYTHON_PIP_CLICK) - stdout, stderr = run_env_command(Commands.ENV_CREATE, "envjson-2", "--quiet", "--json") + stdout, stderr = run_env_command( + Commands.ENV_CREATE, "envjson-2", "--quiet", "--json" + ) output = json.loads(stdout) assert len(output["actions"]["LINK"]) > 0 assert output["actions"]["PIP"][0].startswith("click") @@ -424,7 +439,9 @@ def test_update_env_json_output(self): create_env(ENVIRONMENT_1) run_env_command(Commands.ENV_CREATE, "envjson-3", "--json") create_env(ENVIRONMENT_2) - stdout, stderr = run_env_command(Commands.ENV_UPDATE, "envjson-3", "--quiet", "--json") + stdout, stderr = run_env_command( + Commands.ENV_UPDATE, "envjson-3", "--quiet", "--json" + ) output = json.loads(stdout) assert output["success"] is True assert len(output["actions"]["LINK"]) > 0 @@ -438,7 +455,9 @@ def test_update_env_only_pip_json_output(self): create_env(ENVIRONMENT_PYTHON_PIP_CLICK) run_env_command(Commands.ENV_CREATE, "envjson-4", "--json") create_env(ENVIRONMENT_PYTHON_PIP_CLICK_ATTRS) - stdout, stderr = run_env_command(Commands.ENV_UPDATE, "envjson-4", "--quiet", "--json") + stdout, stderr = run_env_command( + Commands.ENV_UPDATE, "envjson-4", "--quiet", "--json" + ) output = json.loads(stdout) assert output["success"] is True # No conda actions (FETCH/LINK), only pip @@ -454,7 +473,9 @@ def test_update_env_no_action_json_output(self): """ create_env(ENVIRONMENT_PYTHON_PIP_CLICK) run_env_command(Commands.ENV_CREATE, "envjson-5", "--json") - stdout, stderr = run_env_command(Commands.ENV_UPDATE, "envjson-5", "--quiet", "--json") + stdout, stderr = run_env_command( + Commands.ENV_UPDATE, "envjson-5", "--quiet", "--json" + ) output = json.loads(stdout) assert output["message"] == "All requested packages already installed." 
@@ -469,23 +490,61 @@ def test_remove_dry_run(self): def test_set_unset_env_vars(self): create_env(ENVIRONMENT_1) run_env_command(Commands.ENV_CREATE, None) - env_name = 'env-1' - run_env_command(Commands.ENV_CONFIG, env_name, "vars", "set", "DUDE=woah", "SWEET=yaaa", "API_KEY=AaBbCcDd===EeFf", "-n", env_name) - o, e = run_env_command(Commands.ENV_CONFIG, env_name, "vars", "list", "--json", '-n', env_name) + env_name = "env-1" + run_env_command( + Commands.ENV_CONFIG, + env_name, + "vars", + "set", + "DUDE=woah", + "SWEET=yaaa", + "API_KEY=AaBbCcDd===EeFf", + "-n", + env_name, + ) + o, e = run_env_command( + Commands.ENV_CONFIG, env_name, "vars", "list", "--json", "-n", env_name + ) output_env_vars = json.loads(o) - assert output_env_vars == {'DUDE': 'woah', "SWEET": "yaaa", "API_KEY": "AaBbCcDd===EeFf"} + assert output_env_vars == { + "DUDE": "woah", + "SWEET": "yaaa", + "API_KEY": "AaBbCcDd===EeFf", + } - run_env_command(Commands.ENV_CONFIG, env_name, "vars", "unset", "DUDE", "SWEET", "API_KEY", '-n', env_name) - o, e = run_env_command(Commands.ENV_CONFIG, env_name, "vars", "list", "--json", '-n', env_name) + run_env_command( + Commands.ENV_CONFIG, + env_name, + "vars", + "unset", + "DUDE", + "SWEET", + "API_KEY", + "-n", + env_name, + ) + o, e = run_env_command( + Commands.ENV_CONFIG, env_name, "vars", "list", "--json", "-n", env_name + ) output_env_vars = json.loads(o) assert output_env_vars == {} def test_set_unset_env_vars_env_no_exist(self): create_env(ENVIRONMENT_1) run_env_command(Commands.ENV_CREATE, None) - env_name = 'env-11' + env_name = "env-11" try: - run_env_command(Commands.ENV_CONFIG, env_name, "vars", "set", "DUDE=woah", "SWEET=yaaa", "API_KEY=AaBbCcDd===EeFf", "-n", env_name) + run_env_command( + Commands.ENV_CONFIG, + env_name, + "vars", + "set", + "DUDE=woah", + "SWEET=yaaa", + "API_KEY=AaBbCcDd===EeFf", + "-n", + env_name, + ) except Exception as e: self.assertIsInstance(e, EnvironmentLocationNotFound) @@ -511,8 +570,7 @@ def env_is_created(env_name): from os.path import basename for prefix in list_all_known_prefixes(): - name = (ROOT_ENV_NAME if prefix == context.root_dir else - basename(prefix)) + name = ROOT_ENV_NAME if prefix == context.root_dir else basename(prefix) if name == env_name: return True @@ -522,10 +580,10 @@ @pytest.mark.integration class NewIntegrationTests(unittest.TestCase): """ - This is integration test for conda env - make sure all instruction on online documentation works - Please refer to link below - http://conda.pydata.org/docs/using/envs.html#export-the-environment-file + This is an integration test for conda env: + make sure all instructions in the online documentation work. + Please refer to the link below: + http://conda.pydata.org/docs/using/envs.html#export-the-environment-file """ def setUp(self): @@ -542,7 +600,7 @@ def tearDown(self): def test_env_export(self): """ - Test conda env export + Test conda env export """ run_conda_command(Commands.CREATE, TEST_ENV_NAME_2, "flask") @@ -579,7 +637,7 @@ def test_env_export_with_variables(self): """ - Test conda env export + Test conda env export with variables """ run_conda_command(Commands.CREATE, TEST_ENV_NAME_2, "flask") @@ -611,18 +669,20 @@ run_env_command(Commands.ENV_CREATE, None, "--file", env_yaml.name) self.assertTrue(env_is_created(TEST_ENV_NAME_2)) - snowflake, e = run_env_command(Commands.ENV_EXPORT, TEST_ENV_NAME_2, "--no-builds") + snowflake, e = run_env_command( + Commands.ENV_EXPORT, TEST_ENV_NAME_2,
"--no-builds" + ) assert not e.strip() env_description = yaml_safe_load(snowflake) - assert len(env_description['variables']) - assert env_description['variables'].keys() + assert len(env_description["variables"]) + assert env_description["variables"].keys() run_env_command(Commands.ENV_REMOVE, TEST_ENV_NAME_2) assert not env_is_created(TEST_ENV_NAME_2) def test_env_export_json(self): """ - Test conda env export + Test conda env export """ run_conda_command(Commands.CREATE, TEST_ENV_NAME_2, "flask") @@ -645,21 +705,22 @@ def test_env_export_json(self): ( snowflake, e, - ) = run_env_command(Commands.ENV_EXPORT, TEST_ENV_NAME_2, "--no-builds", "--json") + ) = run_env_command( + Commands.ENV_EXPORT, TEST_ENV_NAME_2, "--no-builds", "--json" + ) assert not e.strip() env_description = json.loads(snowflake) - assert len(env_description['dependencies']) - for spec_str in env_description['dependencies']: - assert spec_str.count('=') == 1 + assert len(env_description["dependencies"]) + for spec_str in env_description["dependencies"]: + assert spec_str.count("=") == 1 run_env_command(Commands.ENV_REMOVE, TEST_ENV_NAME_2) assert not env_is_created(TEST_ENV_NAME_2) - def test_list(self): """ - Test conda list -e and conda create from txt + Test conda list -e and conda create from txt """ run_conda_command(Commands.CREATE, TEST_ENV_NAME_2) @@ -673,7 +734,9 @@ def test_list(self): env_txt.close() run_env_command(Commands.ENV_REMOVE, TEST_ENV_NAME_2) self.assertFalse(env_is_created(TEST_ENV_NAME_2)) - run_conda_command(Commands.CREATE, TEST_ENV_NAME_2, "--file " + env_txt.name) + run_conda_command( + Commands.CREATE, TEST_ENV_NAME_2, "--file " + env_txt.name + ) self.assertTrue(env_is_created(TEST_ENV_NAME_2)) snowflake2, e = run_conda_command(Commands.LIST, TEST_ENV_NAME_2, "-e") @@ -681,9 +744,10 @@ def test_list(self): def test_export_multi_channel(self): """ - Test conda env export + Test conda env export """ from conda.core.prefix_data import PrefixData + PrefixData._cache_.clear() run_conda_command(Commands.CREATE, TEST_ENV_NAME_2, "python=3.5") self.assertTrue(env_is_created(TEST_ENV_NAME_2)) @@ -715,7 +779,7 @@ def test_export_multi_channel(self): def test_non_existent_file(self): with self.assertRaises(EnvironmentFileNotFound): - run_env_command(Commands.ENV_CREATE, None, "--file", 'i_do_not_exist.yml') + run_env_command(Commands.ENV_CREATE, None, "--file", "i_do_not_exist.yml") def test_invalid_extensions(self): with Utf8NamedTemporaryFile(mode="w", suffix=".ymla", delete=False) as env_yaml: @@ -723,6 +787,5 @@ def test_invalid_extensions(self): run_env_command(Commands.ENV_CREATE, None, "--file", env_yaml.name) - -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/conda_env/test_create.py b/tests/conda_env/test_create.py index 6702cf76242..237835f7ec5 100644 --- a/tests/conda_env/test_create.py +++ b/tests/conda_env/test_create.py @@ -1,7 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - import unittest from logging import Handler, getLogger from os.path import exists, join @@ -49,7 +47,8 @@ def package_is_installed(prefix, spec, pip=None): prefix_recs = tuple(PrefixData(prefix, pip_interop_enabled=pip).query(spec)) if len(prefix_recs) > 1: raise AssertionError( - "Multiple packages installed.%s" % (dashlist(prec.dist_str() for prec in prefix_recs)) + "Multiple packages installed.%s" + % (dashlist(prec.dist_str() for prec in prefix_recs)) ) is_installed = bool(len(prefix_recs)) if is_installed and pip is True: @@ -87,14 
+86,18 @@ def tearDown(self): def test_create_update(self): with make_temp_envs_dir() as envs_dir: with env_var( - "CONDA_ENVS_DIRS", envs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ENVS_DIRS", + envs_dir, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): env_name = str(uuid4())[:8] prefix = join(envs_dir, env_name) python_path = join(prefix, PYTHON_BINARY) run_command( - Commands.CREATE, env_name, support_file("example/environment_pinned.yml") + Commands.CREATE, + env_name, + support_file("example/environment_pinned.yml"), ) assert exists(python_path) assert package_is_installed(prefix, "flask=2.0.2") @@ -126,14 +129,18 @@ def test_create_update(self): def test_create_host_port(self): with make_temp_envs_dir() as envs_dir: with env_var( - "CONDA_ENVS_DIRS", envs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ENVS_DIRS", + envs_dir, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): env_name = str(uuid4())[:8] prefix = join(envs_dir, env_name) python_path = join(prefix, PYTHON_BINARY) run_command( - Commands.CREATE, env_name, support_file("example/environment_host_port.yml") + Commands.CREATE, + env_name, + support_file("example/environment_host_port.yml"), ) assert exists(python_path) assert package_is_installed(prefix, "flask=2.0.3") @@ -167,7 +174,9 @@ def test_create_advanced_pip(self): assert exists(python_path) PrefixData._cache_.clear() assert package_is_installed(prefix, "argh", pip=True) - assert package_is_installed(prefix, "module-to-install-in-editable-mode", pip=True) + assert package_is_installed( + prefix, "module-to-install-in-editable-mode", pip=True + ) try: assert package_is_installed(prefix, "six", pip=True) except AssertionError: @@ -178,7 +187,9 @@ def test_create_advanced_pip(self): def test_create_empty_env(self): with make_temp_envs_dir() as envs_dir: with env_var( - "CONDA_ENVS_DIRS", envs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ENVS_DIRS", + envs_dir, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): env_name = str(uuid4())[:8] prefix = join(envs_dir, env_name) @@ -193,16 +204,28 @@ def test_create_empty_env(self): def test_create_env_default_packages(self): with make_temp_envs_dir() as envs_dir: with env_var( - "CONDA_ENVS_DIRS", envs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ENVS_DIRS", + envs_dir, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): # set packages run_conda_command( - CondaCommands.CONFIG, envs_dir, "--add", "create_default_packages", "pip" + CondaCommands.CONFIG, + envs_dir, + "--add", + "create_default_packages", + "pip", ) run_conda_command( - CondaCommands.CONFIG, envs_dir, "--add", "create_default_packages", "flask" + CondaCommands.CONFIG, + envs_dir, + "--add", + "create_default_packages", + "flask", + ) + stdout, stderr, _ = run_conda_command( + CondaCommands.CONFIG, envs_dir, "--show" ) - stdout, stderr, _ = run_conda_command(CondaCommands.CONFIG, envs_dir, "--show") yml_obj = yaml_round_trip_load(stdout) assert yml_obj["create_default_packages"] == ["flask", "pip"] @@ -212,7 +235,9 @@ def test_create_env_default_packages(self): env_name = str(uuid4())[:8] prefix = join(envs_dir, env_name) - run_command(Commands.CREATE, env_name, support_file("env_with_dependencies.yml")) + run_command( + Commands.CREATE, env_name, support_file("env_with_dependencies.yml") + ) assert exists(prefix) assert package_is_installed(prefix, "python=2") assert package_is_installed(prefix, "pytz") @@ -226,16 +251,28 @@ def test_create_env_default_packages(self): def 
test_create_env_no_default_packages(self): with make_temp_envs_dir() as envs_dir: with env_var( - "CONDA_ENVS_DIRS", envs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_ENVS_DIRS", + envs_dir, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): # set packages run_conda_command( - CondaCommands.CONFIG, envs_dir, "--add", "create_default_packages", "pip" + CondaCommands.CONFIG, + envs_dir, + "--add", + "create_default_packages", + "pip", ) run_conda_command( - CondaCommands.CONFIG, envs_dir, "--add", "create_default_packages", "flask" + CondaCommands.CONFIG, + envs_dir, + "--add", + "create_default_packages", + "flask", + ) + stdout, stderr, _ = run_conda_command( + CondaCommands.CONFIG, envs_dir, "--show" ) - stdout, stderr, _ = run_conda_command(CondaCommands.CONFIG, envs_dir, "--show") yml_obj = yaml_round_trip_load(stdout) assert yml_obj["create_default_packages"] == ["flask", "pip"] @@ -260,7 +297,9 @@ def test_create_env_no_default_packages(self): # removed from class to be able to accept pytest fixture def test_create_update_remote_env_file(support_file_server_port): with make_temp_envs_dir() as envs_dir: - with env_var("CONDA_ENVS_DIRS", envs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_ENVS_DIRS", envs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): env_name = str(uuid4())[:8] prefix = join(envs_dir, env_name) python_path = join(prefix, PYTHON_BINARY) @@ -269,7 +308,9 @@ def test_create_update_remote_env_file(support_file_server_port): Commands.CREATE, env_name, support_file( - "example/environment_pinned.yml", remote=True, port=support_file_server_port + "example/environment_pinned.yml", + remote=True, + port=support_file_server_port, ), ) assert exists(python_path) diff --git a/tests/conda_env/test_env.py b/tests/conda_env/test_env.py index 8d2d65d4806..0fa098a4880 100644 --- a/tests/conda_env/test_env.py +++ b/tests/conda_env/test_env.py @@ -1,25 +1,24 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import os -from os.path import join import random import unittest +from os.path import join from unittest.mock import patch from uuid import uuid4 -from conda.core.prefix_data import PrefixData from conda.base.context import conda_tests_ctxt_mgmt_def_pol -from conda.exceptions import CondaHTTPError, EnvironmentFileNotFound -from conda.models.match_spec import MatchSpec +from conda.common.compat import on_win from conda.common.io import env_vars from conda.common.serialize import yaml_round_trip_load -from conda.common.compat import on_win - -from . import support_file -from .utils import make_temp_envs_dir, Commands, run_command +from conda.core.prefix_data import PrefixData +from conda.exceptions import CondaHTTPError, EnvironmentFileNotFound +from conda.models.match_spec import MatchSpec +from conda_env.env import from_environment from tests.test_utils import is_prefix_activated_PATHwise -from conda_env.env import from_environment +from . 
import support_file +from .utils import Commands, make_temp_envs_dir, run_command PYTHON_BINARY = "python.exe" if on_win else "bin/python" @@ -47,15 +46,15 @@ def get_environment(filename): def get_simple_environment(): - return get_environment('simple.yml') + return get_environment("simple.yml") def get_valid_keys_environment(): - return get_environment('valid_keys.yml') + return get_environment("valid_keys.yml") def get_invalid_keys_environment(): - return get_environment('invalid_keys.yml') + return get_environment("invalid_keys.yml") class from_file_TestCase(unittest.TestCase): @@ -65,17 +64,17 @@ def test_returns_Environment(self): def test_retains_full_filename(self): e = get_simple_environment() - self.assertEqual(support_file('simple.yml'), e.filename) + self.assertEqual(support_file("simple.yml"), e.filename) def test_with_pip(self): - e = env.from_file(support_file('with-pip.yml')) - assert 'pip' in e.dependencies - assert 'foo' in e.dependencies['pip'] - assert 'baz' in e.dependencies['pip'] + e = env.from_file(support_file("with-pip.yml")) + assert "pip" in e.dependencies + assert "foo" in e.dependencies["pip"] + assert "baz" in e.dependencies["pip"] @pytest.mark.timeout(20) def test_add_pip(self): - e = env.from_file(support_file('add-pip.yml')) + e = env.from_file(support_file("add-pip.yml")) expected = { "conda": ["pip", "car"], "pip": ["foo", "baz"], @@ -105,7 +104,12 @@ def test_envvars(self): e = get_environment("channels_with_envvars.yml") self.assertEqual( set(e.channels), - {'https://localhost/t/aaa-12345/stable', 'https://localhost/t/12345-aaa/stable', 'conda-forge', 'defaults'} + { + "https://localhost/t/aaa-12345/stable", + "https://localhost/t/12345-aaa/stable", + "conda-forge", + "defaults", + }, ) if current_conda_token: os.environ["CONDA_TOKEN"] = current_conda_token @@ -155,9 +159,7 @@ def test_args_are_wildcarded(self): self.assertEqual(e.dependencies, expected) def test_other_tips_of_dependencies_are_supported(self): - e = env.Environment( - dependencies=['nltk', {'pip': ['foo', 'bar']}] - ) + e = env.Environment(dependencies=["nltk", {"pip": ["foo", "bar"]}]) expected = { "conda": ["nltk", "pip"], "pip": ["foo", "bar"], @@ -171,11 +173,11 @@ def test_channels_default_to_empty_list(self): def test_add_channels(self): e = env.Environment() - e.add_channels(['dup', 'dup', 'unique']) - self.assertEqual(e.channels, ['dup', 'unique']) + e.add_channels(["dup", "dup", "unique"]) + self.assertEqual(e.channels, ["dup", "unique"]) def test_remove_channels(self): - e = env.Environment(channels=['channel']) + e = env.Environment(channels=["channel"]) e.remove_channels() self.assertEqual(e.channels, []) @@ -186,12 +188,14 @@ def test_channels_are_provided_by_kwarg(self): def test_to_dict_returns_dictionary_of_data(self): random_name = f"random{random.randint(100, 200)}" - e = env.Environment(name=random_name, channels=["javascript"], dependencies=["nodejs"]) + e = env.Environment( + name=random_name, channels=["javascript"], dependencies=["nodejs"] + ) expected = { - 'name': random_name, - 'channels': ['javascript'], - 'dependencies': ['nodejs'] + "name": random_name, + "channels": ["javascript"], + "dependencies": ["nodejs"], } self.assertEqual(e.to_dict(), expected) @@ -202,16 +206,24 @@ def test_to_dict_returns_just_name_if_only_thing_present(self): def test_to_yaml_returns_yaml_parseable_string(self): random_name = f"random{random.randint(100, 200)}" - e = env.Environment(name=random_name, channels=["javascript"], dependencies=["nodejs"]) + e = env.Environment( + 
name=random_name, channels=["javascript"], dependencies=["nodejs"] + ) - expected = {"name": random_name, "channels": ["javascript"], "dependencies": ["nodejs"]} + expected = { + "name": random_name, + "channels": ["javascript"], + "dependencies": ["nodejs"], + } actual = yaml_round_trip_load(StringIO(e.to_yaml())) self.assertEqual(expected, actual) def test_to_yaml_returns_proper_yaml(self): random_name = f"random{random.randint(100, 200)}" - e = env.Environment(name=random_name, channels=["javascript"], dependencies=["nodejs"]) + e = env.Environment( + name=random_name, channels=["javascript"], dependencies=["nodejs"] + ) expected = "\n".join( [ @@ -229,42 +241,40 @@ def test_to_yaml_returns_proper_yaml(self): def test_to_yaml_takes_stream(self): random_name = f"random{random.randint(100, 200)}" - e = env.Environment(name=random_name, channels=["javascript"], dependencies=["nodejs"]) + e = env.Environment( + name=random_name, channels=["javascript"], dependencies=["nodejs"] + ) s = FakeStream() e.to_yaml(stream=s) - expected = "\n".join([ - 'name: %s' % random_name, - 'channels:', - ' - javascript', - 'dependencies:', - ' - nodejs', - '', - ]) + expected = "\n".join( + [ + "name: %s" % random_name, + "channels:", + " - javascript", + "dependencies:", + " - nodejs", + "", + ] + ) assert expected == s.output def test_can_add_dependencies_to_environment(self): e = get_simple_environment() - e.dependencies.add('bar') + e.dependencies.add("bar") s = FakeStream() e.to_yaml(stream=s) - expected = "\n".join([ - 'name: nlp', - 'dependencies:', - ' - nltk', - ' - bar', - '' - ]) + expected = "\n".join(["name: nlp", "dependencies:", " - nltk", " - bar", ""]) assert expected == s.output def test_dependencies_update_after_adding(self): e = get_simple_environment() - assert 'bar' not in e.dependencies['conda'] - e.dependencies.add('bar') - assert 'bar' in e.dependencies['conda'] + assert "bar" not in e.dependencies["conda"] + e.dependencies.add("bar") + assert "bar" in e.dependencies["conda"] def test_valid_keys(self): e = get_valid_keys_environment() @@ -275,12 +285,12 @@ def test_valid_keys(self): def test_invalid_keys(self): e = get_invalid_keys_environment() e_dict = e.to_dict() - assert 'name' in e_dict + assert "name" in e_dict assert len(e_dict) == 1 class DirectoryTestCase(unittest.TestCase): - directory = support_file('example') + directory = support_file("example") def setUp(self): self.original_working_dir = os.getcwd() @@ -293,46 +303,46 @@ def test_returns_env_object(self): self.assertIsInstance(self.env, env.Environment) def test_has_expected_name(self): - self.assertEqual('test', self.env.name) + self.assertEqual("test", self.env.name) def test_has_dependencies(self): - self.assertEqual(1, len(self.env.dependencies['conda'])) - assert 'numpy' in self.env.dependencies['conda'] + self.assertEqual(1, len(self.env.dependencies["conda"])) + assert "numpy" in self.env.dependencies["conda"] class load_from_directory_example_TestCase(DirectoryTestCase): - directory = support_file('example') + directory = support_file("example") class load_from_directory_example_yaml_TestCase(DirectoryTestCase): - directory = support_file('example-yaml') + directory = support_file("example-yaml") class load_from_directory_recursive_TestCase(DirectoryTestCase): - directory = support_file('foo/bar') + directory = support_file("foo/bar") class load_from_directory_recursive_two_TestCase(DirectoryTestCase): - directory = support_file('foo/bar/baz') + directory = support_file("foo/bar/baz") class 
load_from_directory_trailing_slash_TestCase(DirectoryTestCase): - directory = support_file('foo/bar/baz/') + directory = support_file("foo/bar/baz/") class load_from_directory_TestCase(unittest.TestCase): def test_raises_when_unable_to_find(self): with self.assertRaises(EnvironmentFileNotFound): - env.load_from_directory('/path/to/unknown/env-spec') + env.load_from_directory("/path/to/unknown/env-spec") def test_raised_exception_has_environment_yml_as_file(self): with self.assertRaises(EnvironmentFileNotFound) as e: - env.load_from_directory('/path/to/unknown/env-spec') - self.assertEqual(e.exception.filename, 'environment.yml') + env.load_from_directory("/path/to/unknown/env-spec") + self.assertEqual(e.exception.filename, "environment.yml") class LoadEnvFromFileAndSaveTestCase(unittest.TestCase): - env_path = support_file(os.path.join('saved-env', 'environment.yml')) + env_path = support_file(os.path.join("saved-env", "environment.yml")) def setUp(self): with open(self.env_path, "rb") as fp: @@ -344,28 +354,28 @@ def tearDown(self): fp.write(self.original_file_contents) def test_expected_default_conditions(self): - self.assertEqual(1, len(self.env.dependencies['conda'])) + self.assertEqual(1, len(self.env.dependencies["conda"])) def test(self): - self.env.dependencies.add('numpy') + self.env.dependencies.add("numpy") self.env.save() e = env.load_from_directory(self.env_path) - self.assertEqual(2, len(e.dependencies['conda'])) - assert 'numpy' in e.dependencies['conda'] + self.assertEqual(2, len(e.dependencies["conda"])) + assert "numpy" in e.dependencies["conda"] class EnvironmentSaveTestCase(unittest.TestCase): - env_file = support_file('saved.yml') + env_file = support_file("saved.yml") def tearDown(self): if os.path.exists(self.env_file): os.unlink(self.env_file) def test_creates_file_on_save(self): - self.assertFalse(os.path.exists(self.env_file), msg='sanity check') + self.assertFalse(os.path.exists(self.env_file), msg="sanity check") - e = env.Environment(filename=self.env_file, name='simple') + e = env.Environment(filename=self.env_file, name="simple") e.save() self.assertTrue(os.path.exists(self.env_file)) @@ -391,42 +401,47 @@ class SaveExistingEnvTestCase(unittest.TestCase): @pytest.mark.integration def test_create_advanced_pip(self): with make_temp_envs_dir() as envs_dir: - with env_vars({ - 'CONDA_ENVS_DIRS': envs_dir, - 'CONDA_DLL_SEARCH_MODIFICATION_ENABLE': 'true', - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + { + "CONDA_ENVS_DIRS": envs_dir, + "CONDA_DLL_SEARCH_MODIFICATION_ENABLE": "true", + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): env_name = str(uuid4())[:8] - run_command(Commands.CREATE, env_name, - support_file('pip_argh.yml')) - out_file = join(envs_dir, 'test_env.yaml') + run_command(Commands.CREATE, env_name, support_file("pip_argh.yml")) + out_file = join(envs_dir, "test_env.yaml") # make sure that the export reconsiders the presence of pip interop being enabled PrefixData._cache_.clear() - with env_vars({ - 'CONDA_ENVS_DIRS': envs_dir, - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + { + "CONDA_ENVS_DIRS": envs_dir, + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): # note: out of scope of pip interop var. Should be enabling conda pip interop itself. 
run_command(Commands.EXPORT, env_name, out_file) with open(out_file) as f: d = yaml_round_trip_load(f) - assert {'pip': ['argh==0.26.2']} in d['dependencies'] + assert {"pip": ["argh==0.26.2"]} in d["dependencies"] class TestFromEnvironment(unittest.TestCase): def test_from_history(self): # We're not testing that get_requested_specs_map() actually works # assume it gives us back a dict of MatchSpecs - with patch('conda.history.History.get_requested_specs_map') as m: + with patch("conda.history.History.get_requested_specs_map") as m: m.return_value = { - 'python': MatchSpec('python=3'), - 'pytest': MatchSpec('pytest!=3.7.3'), - 'mock': MatchSpec('mock'), - 'yaml': MatchSpec('yaml>=0.1') + "python": MatchSpec("python=3"), + "pytest": MatchSpec("pytest!=3.7.3"), + "mock": MatchSpec("mock"), + "yaml": MatchSpec("yaml>=0.1"), } - out = from_environment('mock_env', 'mock_prefix', from_history=True) - assert "yaml[version='>=0.1']" in out.to_dict()['dependencies'] - assert "pytest!=3.7.3" in out.to_dict()['dependencies'] - assert len(out.to_dict()['dependencies']) == 4 + out = from_environment("mock_env", "mock_prefix", from_history=True) + assert "yaml[version='>=0.1']" in out.to_dict()["dependencies"] + assert "pytest!=3.7.3" in out.to_dict()["dependencies"] + assert len(out.to_dict()["dependencies"]) == 4 m.assert_called() diff --git a/tests/conda_env/test_pip_util.py b/tests/conda_env/test_pip_util.py index 60c07f368c8..7abbcc7d85d 100644 --- a/tests/conda_env/test_pip_util.py +++ b/tests/conda_env/test_pip_util.py @@ -1,7 +1,7 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import pytest + from conda_env.pip_util import get_pip_installed_packages pip_output_attrs = """ diff --git a/tests/conda_env/utils.py b/tests/conda_env/utils.py index b75a5ae021b..c9c4780e1d7 100644 --- a/tests/conda_env/utils.py +++ b/tests/conda_env/utils.py @@ -1,18 +1,16 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from argparse import ArgumentParser from contextlib import contextmanager from tempfile import mkdtemp from conda.gateways.disk.delete import rm_rf - +from conda.utils import massage_arguments from conda_env.cli.main import do_call as do_call_conda_env from conda_env.cli.main_create import configure_parser as create_configure_parser -from conda_env.cli.main_update import configure_parser as update_configure_parser from conda_env.cli.main_export import configure_parser as export_configure_parser +from conda_env.cli.main_update import configure_parser as update_configure_parser -from conda.utils import massage_arguments class Commands: CREATE = "create" @@ -41,7 +39,7 @@ def run_command(command, env_name, *arguments): args = [command, "-n", env_name, "-f"] + arguments p = ArgumentParser() - sub_parsers = p.add_subparsers(metavar='command', dest='cmd') + sub_parsers = p.add_subparsers(metavar="command", dest="cmd") parser_config[command](sub_parsers) args = p.parse_args(args) diff --git a/tests/conftest.py b/tests/conftest.py index 318a74993ec..fe927687e1d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -6,7 +6,7 @@ import pytest from . 
import http_test_server -from .fixtures_jlap import package_server, package_repository_base # NOQA +from .fixtures_jlap import package_repository_base, package_server # NOQA pytest_plugins = ( # Add testing fixtures and internal pytest plugins here diff --git a/tests/core/test_envs_manager.py b/tests/core/test_envs_manager.py index b74b9f4d082..a17b0aeb5c0 100644 --- a/tests/core/test_envs_manager.py +++ b/tests/core/test_envs_manager.py @@ -1,37 +1,38 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from logging import getLogger import os +from logging import getLogger from os.path import isdir, join, lexists from tempfile import gettempdir from types import SimpleNamespace from unittest import TestCase -from uuid import uuid4 from unittest.mock import patch +from uuid import uuid4 + +import pytest from conda.auxlib.collection import AttrDict from conda.base.constants import PREFIX_MAGIC_FILE -from conda.base.context import context, reset_context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context, reset_context from conda.common.compat import on_win from conda.common.io import env_var -from conda.common.path import paths_equal, expand -from conda.core.envs_manager import list_all_known_prefixes, register_env, \ - get_user_environments_txt_file, \ - unregister_env, _clean_environments_txt +from conda.common.path import expand, paths_equal +from conda.core.envs_manager import ( + _clean_environments_txt, + get_user_environments_txt_file, + list_all_known_prefixes, + register_env, + unregister_env, +) from conda.gateways.disk import mkdir_p from conda.gateways.disk.delete import rm_rf from conda.gateways.disk.read import yield_lines from conda.gateways.disk.update import touch -import pytest - - log = getLogger(__name__) class EnvsManagerUnitTests(TestCase): - def setUp(self): tempdirdir = gettempdir() dirname = str(uuid4())[:8] @@ -39,7 +40,6 @@ def setUp(self): mkdir_p(self.prefix) assert isdir(self.prefix) - def tearDown(self): rm_rf(self.prefix) assert not lexists(self.prefix) @@ -50,19 +50,39 @@ def test_register_unregister_location_env(self): not os.path.exists(user_environments_txt_file) or user_environments_txt_file == os.devnull ): - pytest.skip(f"user environments.txt file {user_environments_txt_file} does not exist") + pytest.skip( + f"user environments.txt file {user_environments_txt_file} does not exist" + ) - gascon_location = join(self.prefix, 'gascon') + gascon_location = join(self.prefix, "gascon") touch(join(gascon_location, PREFIX_MAGIC_FILE), mkdir=True) assert gascon_location not in list_all_known_prefixes() touch(user_environments_txt_file, mkdir=True, sudo_safe=True) register_env(gascon_location) assert gascon_location in yield_lines(user_environments_txt_file) - assert len(tuple(x for x in yield_lines(user_environments_txt_file) if paths_equal(gascon_location, x))) == 1 + assert ( + len( + tuple( + x + for x in yield_lines(user_environments_txt_file) + if paths_equal(gascon_location, x) + ) + ) + == 1 + ) register_env(gascon_location) # should be completely idempotent - assert len(tuple(x for x in yield_lines(user_environments_txt_file) if x == gascon_location)) == 1 + assert ( + len( + tuple( + x + for x in yield_lines(user_environments_txt_file) + if x == gascon_location + ) + ) + == 1 + ) unregister_env(gascon_location) assert gascon_location not in list_all_known_prefixes() @@ -70,26 +90,34 @@ def test_register_unregister_location_env(self): assert gascon_location not in 
list_all_known_prefixes() def test_prefix_cli_flag(self): - envs_dirs = (join(self.prefix, 'first-envs-dir'), join(self.prefix, 'seconds-envs-dir')) - with env_var('CONDA_ENVS_DIRS', os.pathsep.join(envs_dirs), stack_callback=conda_tests_ctxt_mgmt_def_pol): - + envs_dirs = ( + join(self.prefix, "first-envs-dir"), + join(self.prefix, "seconds-envs-dir"), + ) + with env_var( + "CONDA_ENVS_DIRS", + os.pathsep.join(envs_dirs), + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): # even if prefix doesn't exist, it can be a target prefix - reset_context((), argparse_args=AttrDict(prefix='./blarg', func='create')) - target_prefix = join(os.getcwd(), 'blarg') + reset_context((), argparse_args=AttrDict(prefix="./blarg", func="create")) + target_prefix = join(os.getcwd(), "blarg") assert context.target_prefix == target_prefix assert not isdir(target_prefix) def test_rewrite_environments_txt_file(self): - mkdir_p(join(self.prefix, 'conda-meta')) - touch(join(self.prefix, 'conda-meta', 'history')) - doesnt_exist = join(self.prefix, 'blarg') - environments_txt_path = join(self.prefix, 'environments.txt') - with open(environments_txt_path, 'w') as fh: - fh.write(self.prefix + '\n') - fh.write(doesnt_exist + '\n') + mkdir_p(join(self.prefix, "conda-meta")) + touch(join(self.prefix, "conda-meta", "history")) + doesnt_exist = join(self.prefix, "blarg") + environments_txt_path = join(self.prefix, "environments.txt") + with open(environments_txt_path, "w") as fh: + fh.write(self.prefix + "\n") + fh.write(doesnt_exist + "\n") cleaned_1 = _clean_environments_txt(environments_txt_path) assert cleaned_1 == (self.prefix,) - with patch('conda.core.envs_manager._rewrite_environments_txt') as _rewrite_patch: + with patch( + "conda.core.envs_manager._rewrite_environments_txt" + ) as _rewrite_patch: cleaned_2 = _clean_environments_txt(environments_txt_path) assert cleaned_2 == (self.prefix,) assert _rewrite_patch.call_count == 0 @@ -98,7 +126,9 @@ def test_rewrite_environments_txt_file(self): @patch("conda.core.envs_manager.context") @patch("conda.core.envs_manager.get_user_environments_txt_file") @patch("conda.core.envs_manager._clean_environments_txt") -def test_list_all_known_prefixes_with_permission_error(mock_clean_env, mock_get_user_env, mock_context, tmp_path): +def test_list_all_known_prefixes_with_permission_error( + mock_clean_env, mock_get_user_env, mock_context, tmp_path +): # Mock context myenv_dir = tmp_path / "envs" myenv_dir.mkdir() diff --git a/tests/core/test_index.py b/tests/core/test_index.py index 74164c59262..f23dec14191 100644 --- a/tests/core/test_index.py +++ b/tests/core/test_index.py @@ -1,17 +1,20 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - from logging import getLogger from unittest import TestCase import pytest from conda.base.constants import DEFAULT_CHANNELS -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol, non_x86_machines -from conda.common.compat import on_win, on_mac, on_linux +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context, non_x86_machines +from conda.common.compat import on_linux, on_mac, on_win from conda.common.io import env_vars -from conda.core.index import check_allowlist, get_index, get_reduced_index, _supplement_index_with_system +from conda.core.index import ( + _supplement_index_with_system, + check_allowlist, + get_index, + get_reduced_index, +) from conda.exceptions import ChannelNotAllowed from conda.models.channel import Channel from conda.models.enums import PackageType @@ -20,13 
+23,17 @@ log = getLogger(__name__) + def test_check_allowlist(): allowlist = ( - 'defaults', - 'conda-forge', - 'https://beta.conda.anaconda.org/conda-test' + "defaults", + "conda-forge", + "https://beta.conda.anaconda.org/conda-test", ) - with env_vars({'CONDA_ALLOWLIST_CHANNELS': ','.join(allowlist)}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + {"CONDA_ALLOWLIST_CHANNELS": ",".join(allowlist)}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with pytest.raises(ChannelNotAllowed): get_index(("conda-canary",)) @@ -45,9 +52,7 @@ def test_supplement_index_with_system(): _supplement_index_with_system(index) has_virtual_pkgs = { - rec.name - for rec in index - if rec.package_type == PackageType.VIRTUAL_SYSTEM + rec.name for rec in index if rec.package_type == PackageType.VIRTUAL_SYSTEM }.issuperset if on_win: assert has_virtual_pkgs({"__win"}) @@ -72,11 +77,11 @@ def test_supplement_index_with_system_archspec(): def test_supplement_index_with_system_cuda(clear_cuda_version): index = {} - with env_vars({'CONDA_OVERRIDE_CUDA': '3.2'}): + with env_vars({"CONDA_OVERRIDE_CUDA": "3.2"}): _supplement_index_with_system(index) - cuda_pkg = next(iter(_ for _ in index if _.name == '__cuda')) - assert cuda_pkg.version == '3.2' + cuda_pkg = next(iter(_ for _ in index if _.name == "__cuda")) + assert cuda_pkg.version == "3.2" assert cuda_pkg.package_type == PackageType.VIRTUAL_SYSTEM @@ -92,23 +97,26 @@ def test_supplement_index_with_system_osx(): @pytest.mark.skipif(not on_linux, reason="linux-only test") -@pytest.mark.parametrize("release_str,version", [ - ("1.2.3.4", "1.2.3.4"), # old numbering system +@pytest.mark.parametrize( + "release_str,version", + [ + ("1.2.3.4", "1.2.3.4"), # old numbering system ("4.2", "4.2"), ("4.2.1", "4.2.1"), ("4.2.0-42-generic", "4.2.0"), ("5.4.89+", "5.4.89"), ("5.5-rc1", "5.5"), - ("9.1.a", "9.1"), # should probably be "0" - ("9.1.a.2", "9.1"), # should probably be "0" + ("9.1.a", "9.1"), # should probably be "0" + ("9.1.a.2", "9.1"), # should probably be "0" ("9.a.1", "0"), - ]) + ], +) def test_supplement_index_with_system_linux(release_str, version): index = {} - with env_vars({'CONDA_OVERRIDE_LINUX': release_str}): + with env_vars({"CONDA_OVERRIDE_LINUX": release_str}): _supplement_index_with_system(index) - linux_pkg = next(iter(_ for _ in index if _.name == '__linux')) + linux_pkg = next(iter(_ for _ in index if _.name == "__linux")) assert linux_pkg.version == version assert linux_pkg.package_type == PackageType.VIRTUAL_SYSTEM @@ -116,31 +124,30 @@ def test_supplement_index_with_system_linux(release_str, version): @pytest.mark.skipif(on_win or on_mac, reason="linux-only test") def test_supplement_index_with_system_glibc(): index = {} - with env_vars({'CONDA_OVERRIDE_GLIBC': '2.10'}): + with env_vars({"CONDA_OVERRIDE_GLIBC": "2.10"}): _supplement_index_with_system(index) - glibc_pkg = next(iter(_ for _ in index if _.name == '__glibc')) - assert glibc_pkg.version == '2.10' + glibc_pkg = next(iter(_ for _ in index if _.name == "__glibc")) + assert glibc_pkg.version == "2.10" assert glibc_pkg.package_type == PackageType.VIRTUAL_SYSTEM @pytest.mark.integration class GetIndexIntegrationTests(TestCase): - def test_get_index_linux64_platform(self): - linux64 = 'linux-64' + linux64 = "linux-64" index = get_index(platform=linux64) for dist, record in index.items(): assert platform_in_record(linux64, record), (linux64, record.url) def test_get_index_osx64_platform(self): - osx64 = 'osx-64' + osx64 = "osx-64" index = get_index(platform=osx64) for 
dist, record in index.items(): assert platform_in_record(osx64, record), (osx64, record.url) def test_get_index_win64_platform(self): - win64 = 'win-64' + win64 = "win-64" index = get_index(platform=win64) for dist, record in index.items(): assert platform_in_record(win64, record), (win64, record.url) @@ -148,7 +155,11 @@ def test_get_index_win64_platform(self): @pytest.mark.integration class ReducedIndexTests(TestCase): - def test_basic_get_reduced_index(self): - get_reduced_index(None, (Channel('defaults'), Channel('conda-test')), context.subdirs, - (MatchSpec('flask'), ), 'repodata.json') + get_reduced_index( + None, + (Channel("defaults"), Channel("conda-test")), + context.subdirs, + (MatchSpec("flask"),), + "repodata.json", + ) diff --git a/tests/core/test_initialize.py b/tests/core/test_initialize.py index 2935ec711ce..4b6b52e0876 100644 --- a/tests/core/test_initialize.py +++ b/tests/core/test_initialize.py @@ -1,16 +1,16 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from distutils.sysconfig import get_python_lib import ntpath import os -from os.path import abspath, dirname, isfile, join, realpath import sys +from distutils.sysconfig import get_python_lib +from os.path import abspath, dirname, isfile, join, realpath import pytest from conda import CONDA_PACKAGE_ROOT, CONDA_SOURCE_ROOT from conda.auxlib.ish import dals -from conda.base.context import context, reset_context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context, reset_context from conda.cli.common import stdout_json from conda.common.compat import on_win, open from conda.common.io import captured, env_var, env_vars @@ -26,11 +26,11 @@ install_conda_fish, install_conda_sh, install_conda_xsh, + install_condabin_conda_bat, make_entry_point, make_entry_point_exe, make_initialize_plan, make_install_plan, - install_condabin_conda_bat, ) from conda.exceptions import CondaValueError from conda.gateways.disk.create import create_link, mkdir_p @@ -256,11 +256,17 @@ def test_make_install_plan(verbose, mocker): }, { "function": "install_activate", - "kwargs": {"conda_prefix": "/darwin", "target_path": "/darwin/bin/activate"}, + "kwargs": { + "conda_prefix": "/darwin", + "target_path": "/darwin/bin/activate", + }, }, { "function": "install_deactivate", - "kwargs": {"conda_prefix": "/darwin", "target_path": "/darwin/bin/deactivate"}, + "kwargs": { + "conda_prefix": "/darwin", + "target_path": "/darwin/bin/deactivate", + }, }, { "function": "install_conda_sh", @@ -325,11 +331,19 @@ def test_make_initialize_plan_bash_zsh(verbose): def test_make_initialize_plan_cmd_exe(verbose): with tempdir() as conda_temp_prefix: plan = make_initialize_plan( - conda_temp_prefix, ("cmd.exe",), for_user=True, for_system=True, anaconda_prompt=True + conda_temp_prefix, + ("cmd.exe",), + for_user=True, + for_system=True, + anaconda_prompt=True, + ) + steps = tuple( + step for step in plan if step["function"] == "init_cmd_exe_registry" ) - steps = tuple(step for step in plan if step["function"] == "init_cmd_exe_registry") assert len(steps) == 2 - steps = tuple(step for step in plan if step["function"] == "install_anaconda_prompt") + steps = tuple( + step for step in plan if step["function"] == "install_anaconda_prompt" + ) assert len(steps) == 2 steps = tuple(step for step in plan if step["function"] == "init_long_path") assert len(steps) == 1 @@ -342,7 +356,9 @@ def test_make_entry_point(verbose): conda_exe_path = join(conda_temp_prefix, "Scripts", "conda-script.py") else: 
conda_exe_path = join(conda_temp_prefix, "bin", "conda") - result = make_entry_point(conda_exe_path, conda_prefix, "conda.entry.point", "run") + result = make_entry_point( + conda_exe_path, conda_prefix, "conda.entry.point", "run" + ) assert result == Result.MODIFIED with open(conda_exe_path) as fh: @@ -372,9 +388,12 @@ def test_make_entry_point(verbose): """ ) - result = make_entry_point(conda_exe_path, conda_prefix, "conda.entry.point", "run") + result = make_entry_point( + conda_exe_path, conda_prefix, "conda.entry.point", "run" + ) assert result == Result.NO_CHANGE + def test_make_entry_point_exe(verbose): with tempdir() as conda_temp_prefix: conda_prefix = abspath(sys.prefix) @@ -403,12 +422,16 @@ def test_install_conda_sh(verbose): activator = PosixActivator() line0, line1, line2, line3, _, remainder = created_file_contents.split("\n", 5) - assert line0 == "export CONDA_EXE='%s'" % activator.path_conversion(context.conda_exe) + assert line0 == "export CONDA_EXE='%s'" % activator.path_conversion( + context.conda_exe + ) assert line1 == "export _CE_M=''" assert line2 == "export _CE_CONDA=''" assert line3.startswith("export CONDA_PYTHON_EXE=") - with open(join(CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.sh")) as fh: + with open( + join(CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.sh") + ) as fh: original_contents = fh.read() assert remainder == original_contents @@ -426,9 +449,13 @@ def test_install_conda_fish(verbose): with open(target_path) as fh: created_file_contents = fh.read() - first_line, second_line, third_line, fourth_line, remainder = created_file_contents.split( - "\n", 4 - ) + ( + first_line, + second_line, + third_line, + fourth_line, + remainder, + ) = created_file_contents.split("\n", 4) if on_win: win_conda_exe = join(conda_prefix, "Scripts", "conda.exe") win_py_exe = join(conda_prefix, "python.exe") @@ -437,14 +464,20 @@ def test_install_conda_fish(verbose): assert third_line == 'set _CONDA_EXE (cygpath "%s")' % win_conda_exe assert fourth_line == 'set -gx CONDA_PYTHON_EXE (cygpath "%s")' % win_py_exe else: - assert first_line == 'set -gx CONDA_EXE "%s"' % join(conda_prefix, "bin", "conda") + assert first_line == 'set -gx CONDA_EXE "%s"' % join( + conda_prefix, "bin", "conda" + ) assert second_line == 'set _CONDA_ROOT "%s"' % conda_prefix - assert third_line == 'set _CONDA_EXE "%s"' % join(conda_prefix, "bin", "conda") + assert third_line == 'set _CONDA_EXE "%s"' % join( + conda_prefix, "bin", "conda" + ) assert fourth_line == 'set -gx CONDA_PYTHON_EXE "%s"' % join( conda_prefix, "bin", "python" ) - with open(join(CONDA_PACKAGE_ROOT, "shell", "etc", "fish", "conf.d", "conda.fish")) as fh: + with open( + join(CONDA_PACKAGE_ROOT, "shell", "etc", "fish", "conf.d", "conda.fish") + ) as fh: original_contents = fh.read() assert remainder == original_contents @@ -470,7 +503,9 @@ def test_install_conda_xsh(verbose): join(conda_prefix, "Scripts", "conda.exe") ) else: - assert first_line == '$CONDA_EXE = "%s"' % join(conda_prefix, "bin", "conda") + assert first_line == '$CONDA_EXE = "%s"' % join( + conda_prefix, "bin", "conda" + ) with open(join(CONDA_PACKAGE_ROOT, "shell", "conda.xsh")) as fh: original_contents = fh.read() @@ -490,9 +525,13 @@ def test_install_conda_csh(verbose): with open(target_path) as fh: created_file_contents = fh.read() - first_line, second_line, third_line, fourth_line, remainder = created_file_contents.split( - "\n", 4 - ) + ( + first_line, + second_line, + third_line, + fourth_line, + remainder, + ) = 
created_file_contents.split("\n", 4) if on_win: assert first_line == "setenv CONDA_EXE `cygpath %s`" % join( conda_prefix, "Scripts", "conda.exe" @@ -505,14 +544,20 @@ def test_install_conda_csh(verbose): conda_prefix, "python.exe" ) else: - assert first_line == 'setenv CONDA_EXE "%s"' % join(conda_prefix, "bin", "conda") + assert first_line == 'setenv CONDA_EXE "%s"' % join( + conda_prefix, "bin", "conda" + ) assert second_line == 'setenv _CONDA_ROOT "%s"' % conda_prefix - assert third_line == 'setenv _CONDA_EXE "%s"' % join(conda_prefix, "bin", "conda") + assert third_line == 'setenv _CONDA_EXE "%s"' % join( + conda_prefix, "bin", "conda" + ) assert fourth_line == 'setenv CONDA_PYTHON_EXE "%s"' % join( conda_prefix, "bin", "python" ) - with open(join(CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.csh")) as fh: + with open( + join(CONDA_PACKAGE_ROOT, "shell", "etc", "profile.d", "conda.csh") + ) as fh: original_contents = fh.read() assert remainder == original_contents @@ -615,7 +660,9 @@ def test_initialize_dev_bash(verbose): new_py = abspath(join(conda_temp_prefix, get_python_short_path())) mkdir_p(dirname(new_py)) create_link( - abspath(sys.executable), new_py, LinkType.hardlink if on_win else LinkType.softlink + abspath(sys.executable), + new_py, + LinkType.hardlink if on_win else LinkType.softlink, ) with captured() as c: initialize_dev( @@ -693,7 +740,9 @@ def test_initialize_dev_cmd_exe(verbose): new_py = abspath(join(conda_temp_prefix, get_python_short_path())) mkdir_p(dirname(new_py)) create_link( - abspath(sys.executable), new_py, LinkType.hardlink if on_win else LinkType.softlink + abspath(sys.executable), + new_py, + LinkType.hardlink if on_win else LinkType.softlink, ) with captured() as c: initialize_dev( @@ -967,7 +1016,9 @@ def _read_windows_registry_mock(target_path, value=None): try: target_path = r"HKEY_CURRENT_USER\Software\Microsoft\Command Processor\AutoRun" conda_prefix = "c:\\Users\\Lars\\miniconda" - with env_var("CONDA_DRY_RUN", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_DRY_RUN", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with captured() as c: initialize.init_cmd_exe_registry(target_path, conda_prefix) finally: @@ -985,9 +1036,13 @@ def _read_windows_registry_mock(target_path, value=None): try: target_path = r"HKEY_CURRENT_USER\Software\Microsoft\Command Processor\AutoRun" conda_prefix = "c:\\Users\\Lars\\miniconda" - with env_var("CONDA_DRY_RUN", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_DRY_RUN", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with captured() as c: - initialize.init_cmd_exe_registry(target_path, conda_prefix, reverse=True) + initialize.init_cmd_exe_registry( + target_path, conda_prefix, reverse=True + ) finally: initialize._read_windows_registry = orig_read_windows_registry initialize.join = orig_join @@ -1017,9 +1072,7 @@ def _write_windows_registry_mock(target_path, value, dtype): initialize.join = ntpath.join try: - target_path = ( - r"HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\FileSystem\\LongPathsEnabled" - ) + target_path = r"HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\FileSystem\\LongPathsEnabled" assert initialize._read_windows_registry(target_path)[0] == 0 initialize.init_long_path(target_path) assert initialize._read_windows_registry(target_path)[0] == 1 @@ -1037,7 +1090,10 @@ def test_init_sh_system(verbose): with open(target_path) as fh: content = fh.read().strip().splitlines() assert content[0] == 
"# >>> conda initialize >>>" - assert content[1] == "# !! Contents within this block are managed by 'conda init' !!" + assert ( + content[1] + == "# !! Contents within this block are managed by 'conda init' !!" + ) assert content[-1] == "# <<< conda initialize <<<" init_sh_system(target_path, conda_prefix, reverse=True) diff --git a/tests/core/test_package_cache_data.py b/tests/core/test_package_cache_data.py index 61322414f76..990bbb5eac4 100644 --- a/tests/core/test_package_cache_data.py +++ b/tests/core/test_package_cache_data.py @@ -1,12 +1,12 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import datetime import json from os.path import abspath, basename, dirname, join import pytest +import conda.core.package_cache from conda import CondaError, CondaMultiError from conda.base.constants import PACKAGE_CACHE_MAGIC_FILE from conda.base.context import conda_tests_ctxt_mgmt_def_pol @@ -28,9 +28,9 @@ from conda.testing.helpers import CHANNEL_DIR from conda.testing.integration import make_temp_package_cache -import conda.core.package_cache - -assert CHANNEL_DIR == abspath(join(dirname(__file__), "..", "data", "conda_format_repo")) +assert CHANNEL_DIR == abspath( + join(dirname(__file__), "..", "data", "conda_format_repo") +) CONDA_PKG_REPO = url_path(CHANNEL_DIR) subdir = "win-64" @@ -91,7 +91,9 @@ def test_ProgressiveFetchExtract_prefers_conda_v2_format(): # zlib is the one package in the test index that has a .conda file record if rec.name == "zlib" and rec.version == "1.2.11": break - cache_action, extract_action = ProgressiveFetchExtract.make_actions_for_record(rec) + cache_action, extract_action = ProgressiveFetchExtract.make_actions_for_record( + rec + ) assert cache_action assert cache_action.target_package_basename.endswith(".conda") assert extract_action @@ -99,7 +101,8 @@ def test_ProgressiveFetchExtract_prefers_conda_v2_format(): @pytest.mark.skipif( - on_win and datetime.datetime.now() < datetime.datetime(2020, 1, 30), reason="time bomb" + on_win and datetime.datetime.now() < datetime.datetime(2020, 1, 30), + reason="time bomb", ) def test_tar_bz2_in_pkg_cache_used_instead_of_conda_pkg(): """ @@ -148,7 +151,8 @@ def test_tar_bz2_in_pkg_cache_doesnt_overwrite_conda_pkg(): complementary .conda package replaces it if that's what is requested. """ with env_vars( - {"CONDA_SEPARATE_FORMAT_CACHE": True}, stack_callback=conda_tests_ctxt_mgmt_def_pol + {"CONDA_SEPARATE_FORMAT_CACHE": True}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): with make_temp_package_cache() as pkgs_dir: # Cache the .tar.bz2 file in the package cache and extract it @@ -188,7 +192,9 @@ def test_tar_bz2_in_pkg_cache_doesnt_overwrite_conda_pkg(): pfe.execute() - with open(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json")) as fh: + with open( + join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json") + ) as fh: repodata_record = json.load(fh) assert repodata_record["fn"] == zlib_conda_fn @@ -205,7 +211,8 @@ def test_conda_pkg_in_pkg_cache_doesnt_overwrite_tar_bz2(): complementary .tar.bz2 package replaces it if that's what is requested. 
""" with env_vars( - {"CONDA_SEPARATE_FORMAT_CACHE": True}, stack_callback=conda_tests_ctxt_mgmt_def_pol + {"CONDA_SEPARATE_FORMAT_CACHE": True}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): with make_temp_package_cache() as pkgs_dir: # Cache the .conda file in the package cache and extract it @@ -245,7 +252,9 @@ def test_conda_pkg_in_pkg_cache_doesnt_overwrite_tar_bz2(): pfe.execute() - with open(join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json")) as fh: + with open( + join(pkgs_dir, zlib_base_fn, "info", "repodata_record.json") + ) as fh: repodata_record = json.load(fh) assert repodata_record["fn"] == zlib_tar_bz2_fn @@ -274,7 +283,8 @@ def test_conda_pkg_in_pkg_cache_doesnt_overwrite_tar_bz2(): @pytest.mark.skipif( - on_win and datetime.datetime.now() < datetime.datetime(2020, 1, 30), reason="time bomb" + on_win and datetime.datetime.now() < datetime.datetime(2020, 1, 30), + reason="time bomb", ) def test_tar_bz2_in_cache_not_extracted(): """ @@ -282,7 +292,9 @@ def test_tar_bz2_in_cache_not_extracted(): .conda package is requested, the .tar.bz2 package in the cache is used by default. """ with make_temp_package_cache() as pkgs_dir: - copy(join(CHANNEL_DIR, subdir, zlib_tar_bz2_fn), join(pkgs_dir, zlib_tar_bz2_fn)) + copy( + join(CHANNEL_DIR, subdir, zlib_tar_bz2_fn), join(pkgs_dir, zlib_tar_bz2_fn) + ) pfe = ProgressiveFetchExtract((zlib_tar_bz2_prec,)) pfe.prepare() assert len(pfe.cache_actions) == 1 @@ -303,7 +315,8 @@ def test_tar_bz2_in_cache_not_extracted(): @pytest.mark.skipif( - on_win and datetime.datetime.now() < datetime.datetime(2020, 1, 30), reason="time bomb" + on_win and datetime.datetime.now() < datetime.datetime(2020, 1, 30), + reason="time bomb", ) def test_instantiating_package_cache_when_both_tar_bz2_and_conda_exist(): """ diff --git a/tests/core/test_path_actions.py b/tests/core/test_path_actions.py index 9092aef7a00..9cd76d6ecbb 100644 --- a/tests/core/test_path_actions.py +++ b/tests/core/test_path_actions.py @@ -1,26 +1,37 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause import importlib.util -from logging import getLogger import os -from os.path import basename, dirname, isdir, isfile, join, lexists, getsize import sys +import warnings +from logging import getLogger +from os.path import basename, dirname, getsize, isdir, isfile, join, lexists from tempfile import gettempdir from unittest import TestCase from uuid import uuid4 -import warnings import pytest -from conda.common.iterators import groupby_to_dict as groupby -from conda.auxlib.ish import dals from conda.auxlib.collection import AttrDict +from conda.auxlib.ish import dals from conda.base.context import context from conda.common.compat import on_win -from conda.common.path import get_bin_directory_short_path, get_python_noarch_target_path, \ - get_python_short_path, get_python_site_packages_short_path, parse_entry_point_def, pyc_path, \ - win_path_ok, explode_directories -from conda.core.path_actions import CompileMultiPycAction, CreatePythonEntryPointAction, LinkPathAction +from conda.common.iterators import groupby_to_dict as groupby +from conda.common.path import ( + explode_directories, + get_bin_directory_short_path, + get_python_noarch_target_path, + get_python_short_path, + get_python_site_packages_short_path, + parse_entry_point_def, + pyc_path, + win_path_ok, +) +from conda.core.path_actions import ( + CompileMultiPycAction, + CreatePythonEntryPointAction, + LinkPathAction, +) from conda.gateways.disk.create import create_link, mkdir_p from 
conda.gateways.disk.delete import rm_rf from conda.gateways.disk.link import islink @@ -33,12 +44,12 @@ log = getLogger(__name__) -def make_test_file(target_dir, suffix='', contents=''): +def make_test_file(target_dir, suffix="", contents=""): if not isdir(target_dir): mkdir_p(target_dir) fn = str(uuid4())[:8] full_path = join(target_dir, fn + suffix) - with open(full_path, 'w') as fh: + with open(full_path, "w") as fh: fh.write(contents or str(uuid4())) return full_path @@ -54,12 +65,12 @@ class PathActionsTests(TestCase): def setUp(self): tempdirdir = gettempdir() - prefix_dirname = str(uuid4())[:4] + ' ' + str(uuid4())[:4] + '-prefix' + prefix_dirname = str(uuid4())[:4] + " " + str(uuid4())[:4] + "-prefix" self.prefix = join(tempdirdir, prefix_dirname) mkdir_p(self.prefix) assert isdir(self.prefix) - pkgs_dirname = str(uuid4())[:4] + ' ' + str(uuid4())[:4] + pkgs_dirname = str(uuid4())[:4] + " " + str(uuid4())[:4] self.pkgs_dir = join(tempdirdir, pkgs_dirname) mkdir_p(self.pkgs_dir) assert isdir(self.pkgs_dir) @@ -72,53 +83,64 @@ def tearDown(self): def test_CompileMultiPycAction_generic(self): package_info = AttrDict( - package_metadata=AttrDict( - noarch=AttrDict( - type=NoarchType.generic)) + package_metadata=AttrDict(noarch=AttrDict(type=NoarchType.generic)) ) noarch = package_info.package_metadata and package_info.package_metadata.noarch assert noarch.type == NoarchType.generic - axns = CompileMultiPycAction.create_actions({}, package_info, self.prefix, None, ()) + axns = CompileMultiPycAction.create_actions( + {}, package_info, self.prefix, None, () + ) assert axns == () package_info = AttrDict(package_metadata=None) - axns = CompileMultiPycAction.create_actions({}, package_info, self.prefix, None, ()) + axns = CompileMultiPycAction.create_actions( + {}, package_info, self.prefix, None, () + ) assert axns == () - @pytest.mark.xfail(on_win, reason="pyc compilation need env on windows, see gh #8025") + @pytest.mark.xfail( + on_win, reason="pyc compilation need env on windows, see gh #8025" + ) def test_CompileMultiPycAction_noarch_python(self): if not softlink_supported(__file__, self.prefix) and on_win: pytest.skip("softlink not supported") - target_python_version = '%d.%d' % sys.version_info[:2] + target_python_version = "%d.%d" % sys.version_info[:2] sp_dir = get_python_site_packages_short_path(target_python_version) transaction_context = { - 'target_python_version': target_python_version, - 'target_site_packages_short_path': sp_dir, + "target_python_version": target_python_version, + "target_site_packages_short_path": sp_dir, } - package_info = AttrDict(package_metadata=AttrDict(noarch=AttrDict(type=NoarchType.python))) + package_info = AttrDict( + package_metadata=AttrDict(noarch=AttrDict(type=NoarchType.python)) + ) file_link_actions = [ AttrDict( - source_short_path='site-packages/something.py', - target_short_path=get_python_noarch_target_path('site-packages/something.py', sp_dir), + source_short_path="site-packages/something.py", + target_short_path=get_python_noarch_target_path( + "site-packages/something.py", sp_dir + ), ), AttrDict( - source_short_path='site-packages/another.py', - target_short_path=get_python_noarch_target_path('site-packages/another.py', sp_dir), + source_short_path="site-packages/another.py", + target_short_path=get_python_noarch_target_path( + "site-packages/another.py", sp_dir + ), ), AttrDict( # this one shouldn't get compiled - source_short_path='something.py', - target_short_path=get_python_noarch_target_path('something.py', sp_dir), + 
source_short_path="something.py", + target_short_path=get_python_noarch_target_path("something.py", sp_dir), ), AttrDict( # this one shouldn't get compiled - source_short_path='another.py', - target_short_path=get_python_noarch_target_path('another.py', sp_dir), + source_short_path="another.py", + target_short_path=get_python_noarch_target_path("another.py", sp_dir), ), ] - axns = CompileMultiPycAction.create_actions(transaction_context, package_info, self.prefix, - None, file_link_actions) + axns = CompileMultiPycAction.create_actions( + transaction_context, package_info, self.prefix, None, file_link_actions + ) assert len(axns) == 1 axn = axns[0] @@ -126,27 +148,53 @@ def test_CompileMultiPycAction_noarch_python(self): source_full_path0 = source_full_paths[0] source_full_path1 = source_full_paths[1] assert len(source_full_paths) == 2 - assert source_full_path0 == join(self.prefix, win_path_ok(get_python_noarch_target_path('site-packages/something.py', sp_dir))) - assert source_full_path1 == join(self.prefix, win_path_ok(get_python_noarch_target_path('site-packages/another.py', sp_dir))) + assert source_full_path0 == join( + self.prefix, + win_path_ok( + get_python_noarch_target_path("site-packages/something.py", sp_dir) + ), + ) + assert source_full_path1 == join( + self.prefix, + win_path_ok( + get_python_noarch_target_path("site-packages/another.py", sp_dir) + ), + ) target_full_paths = tuple(axn.target_full_paths) target_full_path0 = target_full_paths[0] target_full_path1 = target_full_paths[1] assert len(target_full_paths) == 2 - assert target_full_path0 == join(self.prefix, win_path_ok(pyc_path(get_python_noarch_target_path('site-packages/something.py', sp_dir), - target_python_version))) - assert target_full_path1 == join(self.prefix, win_path_ok(pyc_path(get_python_noarch_target_path('site-packages/another.py', sp_dir), - target_python_version))) + assert target_full_path0 == join( + self.prefix, + win_path_ok( + pyc_path( + get_python_noarch_target_path("site-packages/something.py", sp_dir), + target_python_version, + ) + ), + ) + assert target_full_path1 == join( + self.prefix, + win_path_ok( + pyc_path( + get_python_noarch_target_path("site-packages/another.py", sp_dir), + target_python_version, + ) + ), + ) # make .py file in prefix that will be compiled mkdir_p(dirname(source_full_path0)) - with open(source_full_path0, 'w') as fh: + with open(source_full_path0, "w") as fh: fh.write("value = 42\n") mkdir_p(dirname(source_full_path1)) - with open(source_full_path1, 'w') as fh: + with open(source_full_path1, "w") as fh: fh.write("value = 43\n") # symlink the current python - python_full_path = join(self.prefix, get_python_short_path(target_python_version)) + python_full_path = join( + self.prefix, get_python_short_path(target_python_version) + ) mkdir_p(dirname(python_full_path)) create_link(sys.executable, python_full_path, LinkType.softlink) @@ -171,24 +219,31 @@ def test_CompileMultiPycAction_noarch_python(self): def test_CreatePythonEntryPointAction_generic(self): package_info = AttrDict(package_metadata=None) - axns = CreatePythonEntryPointAction.create_actions({}, package_info, self.prefix, None) + axns = CreatePythonEntryPointAction.create_actions( + {}, package_info, self.prefix, None + ) assert axns == () def test_CreatePythonEntryPointAction_noarch_python(self): - target_python_version = '%d.%d' % sys.version_info[:2] + target_python_version = "%d.%d" % sys.version_info[:2] transaction_context = { - 'target_python_version': target_python_version, + 
"target_python_version": target_python_version, } - package_info = AttrDict(package_metadata=AttrDict(noarch=AttrDict( - type=NoarchType.python, - entry_points=( - 'command1=some.module:main', - 'command2=another.somewhere:go', - ), - ))) + package_info = AttrDict( + package_metadata=AttrDict( + noarch=AttrDict( + type=NoarchType.python, + entry_points=( + "command1=some.module:main", + "command2=another.somewhere:go", + ), + ) + ) + ) - axns = CreatePythonEntryPointAction.create_actions(transaction_context, package_info, - self.prefix, LinkType.hardlink) + axns = CreatePythonEntryPointAction.create_actions( + transaction_context, package_info, self.prefix, LinkType.hardlink + ) grouped_axns = groupby(lambda ax: isinstance(ax, LinkPathAction), axns) windows_exe_axns = grouped_axns.get(True, ()) assert len(windows_exe_axns) == (2 if on_win else 0) @@ -197,15 +252,15 @@ def test_CreatePythonEntryPointAction_noarch_python(self): py_ep_axn = py_ep_axns[0] - command, module, func = parse_entry_point_def('command1=some.module:main') - assert command == 'command1' + command, module, func = parse_entry_point_def("command1=some.module:main") + assert command == "command1" if on_win: target_short_path = f"{get_bin_directory_short_path()}\\{command}-script.py" else: target_short_path = f"{get_bin_directory_short_path()}/{command}" assert py_ep_axn.target_full_path == join(self.prefix, target_short_path) - assert py_ep_axn.module == module == 'some.module' - assert py_ep_axn.func == func == 'main' + assert py_ep_axn.module == module == "some.module" + assert py_ep_axn.func == func == "main" mkdir_p(dirname(py_ep_axn.target_full_path)) py_ep_axn.execute() @@ -216,7 +271,9 @@ def test_CreatePythonEntryPointAction_noarch_python(self): lines = fh.read() last_line = lines.splitlines()[-1].strip() if not on_win: - python_full_path = join(self.prefix, get_python_short_path(target_python_version)) + python_full_path = join( + self.prefix, get_python_short_path(target_python_version) + ) if " " in self.prefix: # spaces in prefix break shebang! 
we use this python/shell workaround # also seen in virtualenv @@ -238,7 +295,9 @@ def test_CreatePythonEntryPointAction_noarch_python(self): if on_win: windows_exe_axn = windows_exe_axns[0] target_short_path = f"{get_bin_directory_short_path()}\\{command}.exe" - assert windows_exe_axn.target_full_path == join(self.prefix, target_short_path) + assert windows_exe_axn.target_full_path == join( + self.prefix, target_short_path + ) mkdir_p(dirname(windows_exe_axn.target_full_path)) windows_exe_axn.verify() @@ -261,14 +320,22 @@ def test_simple_LinkPathAction_hardlink(self): path_type = PathType.hardlink source_path_data = PathDataV1( - _path = source_short_path, + _path=source_short_path, path_type=path_type, sha256=correct_sha256, size_in_bytes=correct_size_in_bytes, ) - axn = LinkPathAction({}, None, self.pkgs_dir, source_short_path, self.prefix, - target_short_path, LinkType.hardlink, source_path_data) + axn = LinkPathAction( + {}, + None, + self.pkgs_dir, + source_short_path, + self.prefix, + target_short_path, + LinkType.hardlink, + source_path_data, + ) assert axn.target_full_path == join(self.prefix, target_short_path) axn.verify() @@ -292,14 +359,22 @@ def test_simple_LinkPathAction_softlink(self): path_type = PathType.hardlink source_path_data = PathDataV1( - _path = source_short_path, + _path=source_short_path, path_type=path_type, sha256=correct_sha256, size_in_bytes=correct_size_in_bytes, ) - axn = LinkPathAction({}, None, self.pkgs_dir, source_short_path, self.prefix, - target_short_path, LinkType.softlink, source_path_data) + axn = LinkPathAction( + {}, + None, + self.pkgs_dir, + source_short_path, + self.prefix, + target_short_path, + LinkType.softlink, + source_path_data, + ) assert axn.target_full_path == join(self.prefix, target_short_path) axn.verify() @@ -313,9 +388,17 @@ def test_simple_LinkPathAction_softlink(self): assert lexists(source_full_path) def test_simple_LinkPathAction_directory(self): - target_short_path = join('a', 'nested', 'directory') - axn = LinkPathAction({}, None, None, None, self.prefix, - target_short_path, LinkType.directory, None) + target_short_path = join("a", "nested", "directory") + axn = LinkPathAction( + {}, + None, + None, + None, + self.prefix, + target_short_path, + LinkType.directory, + None, + ) axn.verify() axn.execute() @@ -337,14 +420,22 @@ def test_simple_LinkPathAction_copy(self): path_type = PathType.hardlink source_path_data = PathDataV1( - _path = source_short_path, + _path=source_short_path, path_type=path_type, sha256=correct_sha256, size_in_bytes=correct_size_in_bytes, ) - axn = LinkPathAction({}, None, self.pkgs_dir, source_short_path, self.prefix, - target_short_path, LinkType.copy, source_path_data) + axn = LinkPathAction( + {}, + None, + self.pkgs_dir, + source_short_path, + self.prefix, + target_short_path, + LinkType.copy, + source_path_data, + ) assert axn.target_full_path == join(self.prefix, target_short_path) axn.verify() diff --git a/tests/core/test_portability.py b/tests/core/test_portability.py index 0cfb3c7d079..7053da940b4 100644 --- a/tests/core/test_portability.py +++ b/tests/core/test_portability.py @@ -1,7 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - import os import re from logging import getLogger @@ -12,7 +10,12 @@ from conda.auxlib.ish import dals from conda.base.constants import PREFIX_PLACEHOLDER from conda.common.compat import on_win -from conda.core.portability import SHEBANG_REGEX, replace_long_shebang, update_prefix, MAX_SHEBANG_LENGTH +from 
conda.core.portability import ( + MAX_SHEBANG_LENGTH, + SHEBANG_REGEX, + replace_long_shebang, + update_prefix, +) from conda.models.enums import FileMode log = getLogger(__name__) @@ -54,7 +57,9 @@ def test_replace_simple_shebang_no_replacement(self): # simple shebang no replacement # NOTE: we don't do anything if the binary contains spaces! not our problem :) shebang = b"#!/simple/shebang/escaped\\ space --and --flags -x" - data = b"\n".join((shebang, self.content_line, self.content_line, self.content_line)) + data = b"\n".join( + (shebang, self.content_line, self.content_line, self.content_line) + ) new_data = replace_long_shebang(FileMode.text, data) assert data == new_data @@ -63,7 +68,9 @@ def test_replace_long_shebang_with_truncation_python(self): # executable name is 'python' shebang = b"#!/" + b"shebang/" * 100 + b"python" + b" --and --flags -x" assert len(shebang) > MAX_SHEBANG_LENGTH - data = b"\n".join((shebang, self.content_line, self.content_line, self.content_line)) + data = b"\n".join( + (shebang, self.content_line, self.content_line, self.content_line) + ) new_data = replace_long_shebang(FileMode.text, data) new_shebang = b"#!/usr/bin/env python --and --flags -x" assert len(new_shebang) < MAX_SHEBANG_LENGTH @@ -77,7 +84,9 @@ def test_replace_long_shebang_with_truncation_escaped_space(self): # executable name is 'escaped space' shebang = b"#!/" + b"shebang/" * 100 + b"escaped\\ space" + b" --and --flags -x" assert len(shebang) > MAX_SHEBANG_LENGTH - data = b"\n".join((shebang, self.content_line, self.content_line, self.content_line)) + data = b"\n".join( + (shebang, self.content_line, self.content_line, self.content_line) + ) new_data = replace_long_shebang(FileMode.text, data) new_shebang = b"#!/usr/bin/env escaped\\ space --and --flags -x" assert len(new_shebang) < MAX_SHEBANG_LENGTH @@ -91,7 +100,9 @@ def test_replace_normal_shebang_spaces_in_prefix_python(self): # executable name is 'python' shebang = b"#!/she\\ bang/python --and --flags -x" assert len(shebang) < MAX_SHEBANG_LENGTH - data = b"\n".join((shebang, self.content_line, self.content_line, self.content_line)) + data = b"\n".join( + (shebang, self.content_line, self.content_line, self.content_line) + ) new_data = replace_long_shebang(FileMode.text, data) new_shebang = b"#!/usr/bin/env python --and --flags -x" assert len(new_shebang) < MAX_SHEBANG_LENGTH @@ -105,7 +116,9 @@ def test_replace_normal_shebang_spaces_in_prefix_escaped_space(self): # executable name is 'escaped space' shebang = b"#!/she\\ bang/escaped\\ space --and --flags -x" assert len(shebang) < MAX_SHEBANG_LENGTH - data = b"\n".join((shebang, self.content_line, self.content_line, self.content_line)) + data = b"\n".join( + (shebang, self.content_line, self.content_line, self.content_line) + ) new_data = replace_long_shebang(FileMode.text, data) new_shebang = b"#!/usr/bin/env escaped\\ space --and --flags -x" assert len(new_shebang) < MAX_SHEBANG_LENGTH @@ -118,7 +131,9 @@ def test_replace_long_shebang_spaces_in_prefix(self): # long shebang with escaped spaces in prefix shebang = b"#!/" + b"she\\ bang/" * 100 + b"python --and --flags -x" assert len(shebang) > MAX_SHEBANG_LENGTH - data = b"\n".join((shebang, self.content_line, self.content_line, self.content_line)) + data = b"\n".join( + (shebang, self.content_line, self.content_line, self.content_line) + ) new_data = replace_long_shebang(FileMode.text, data) new_shebang = b"#!/usr/bin/env python --and --flags -x" assert len(new_shebang) < MAX_SHEBANG_LENGTH diff --git a/tests/core/test_prefix_data.py 
b/tests/core/test_prefix_data.py index 9f1e0ab008a..056f71b4dcd 100644 --- a/tests/core/test_prefix_data.py +++ b/tests/core/test_prefix_data.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from contextlib import contextmanager from os.path import isdir, join, lexists from tempfile import gettempdir @@ -9,18 +8,20 @@ import pytest +from conda.base.constants import PREFIX_STATE_FILE from conda.common.compat import on_win from conda.core.prefix_data import PrefixData, get_conda_anchor_files_and_records -from tests.data.env_metadata import ( - PATH_TEST_ENV_1, PATH_TEST_ENV_2, PATH_TEST_ENV_3, PATH_TEST_ENV_4, -) -from conda.base.constants import PREFIX_STATE_FILE +from conda.exceptions import CorruptedEnvironmentError from conda.gateways.disk import mkdir_p from conda.gateways.disk.delete import rm_rf -from conda.exceptions import CorruptedEnvironmentError - +from tests.data.env_metadata import ( + PATH_TEST_ENV_1, + PATH_TEST_ENV_2, + PATH_TEST_ENV_3, + PATH_TEST_ENV_4, +) -ENV_VARS_FILE = ''' +ENV_VARS_FILE = """ { "version": 1, "env_vars": { @@ -28,14 +29,14 @@ "ENV_TWO": "you", "ENV_THREE": "me" } -}''' +}""" def _print_output(*args): """Helper function to print output in case of failed tests.""" for arg in args: print(arg) - print('\n') + print("\n") class DummyPythonRecord: @@ -47,6 +48,7 @@ def set_on_win(val): import conda.common.path import conda.common.pkg_formats.python import conda.core.prefix_data + on_win_saved = conda.common.path.on_win win_path_ok_saved_1 = conda.core.prefix_data.win_path_ok win_path_ok_saved_2 = conda.common.pkg_formats.python.win_path_ok @@ -67,26 +69,111 @@ def set_on_win(val): def test_pip_interop_windows(): test_cases = ( - (PATH_TEST_ENV_3, - ('babel', 'backports-functools-lru-cache', 'chardet', 'cheroot', 'cherrypy', - 'cssselect', 'dask', 'django', 'django-phonenumber-field', 'django-twilio', - 'entrypoints', 'h5py', 'idna', 'jaraco-functools', 'lxml', 'more-itertools', - 'numpy', 'parsel', 'phonenumberslite', 'pluggy', 'portend', 'py', 'pyjwt', - 'pyopenssl', 'pytz', 'pywin32', 'pywin32-ctypes', 'queuelib', 'requests', - 'scrapy', 'service-identity', 'six', 'tempora', 'tox', 'urllib3', 'virtualenv', - 'w3lib') + ( + PATH_TEST_ENV_3, + ( + "babel", + "backports-functools-lru-cache", + "chardet", + "cheroot", + "cherrypy", + "cssselect", + "dask", + "django", + "django-phonenumber-field", + "django-twilio", + "entrypoints", + "h5py", + "idna", + "jaraco-functools", + "lxml", + "more-itertools", + "numpy", + "parsel", + "phonenumberslite", + "pluggy", + "portend", + "py", + "pyjwt", + "pyopenssl", + "pytz", + "pywin32", + "pywin32-ctypes", + "queuelib", + "requests", + "scrapy", + "service-identity", + "six", + "tempora", + "tox", + "urllib3", + "virtualenv", + "w3lib", + ), ), - (PATH_TEST_ENV_4, - ('asn1crypto', 'attrs', 'automat', 'babel', 'backports-functools-lru-cache', - 'cffi', 'chardet', 'cheroot', 'cherrypy', 'configparser', 'constantly', - 'cryptography', 'cssselect', 'dask', 'django', 'django-phonenumber-field', - 'django-twilio', 'entrypoints', 'enum34', 'functools32', 'h5py', 'hdf5storage', - 'hyperlink', 'idna', 'incremental', 'ipaddress', 'jaraco-functools', 'keyring', - 'lxml', 'more-itertools', 'numpy', 'parsel', 'phonenumberslite', 'pluggy', - 'portend', 'py', 'pyasn1', 'pyasn1-modules', 'pycparser', 'pydispatcher', - 'pyhamcrest', 'pyjwt', 'pyopenssl', 'pytz', 'pywin32', 'pywin32-ctypes', - 'queuelib', 'requests', 'scrapy', 'service-identity', 'six', 'tempora', 'tox', - 'twilio', 
'twisted', 'urllib3', 'virtualenv', 'w3lib', 'zope-interface') + ( + PATH_TEST_ENV_4, + ( + "asn1crypto", + "attrs", + "automat", + "babel", + "backports-functools-lru-cache", + "cffi", + "chardet", + "cheroot", + "cherrypy", + "configparser", + "constantly", + "cryptography", + "cssselect", + "dask", + "django", + "django-phonenumber-field", + "django-twilio", + "entrypoints", + "enum34", + "functools32", + "h5py", + "hdf5storage", + "hyperlink", + "idna", + "incremental", + "ipaddress", + "jaraco-functools", + "keyring", + "lxml", + "more-itertools", + "numpy", + "parsel", + "phonenumberslite", + "pluggy", + "portend", + "py", + "pyasn1", + "pyasn1-modules", + "pycparser", + "pydispatcher", + "pyhamcrest", + "pyjwt", + "pyopenssl", + "pytz", + "pywin32", + "pywin32-ctypes", + "queuelib", + "requests", + "scrapy", + "service-identity", + "six", + "tempora", + "tox", + "twilio", + "twisted", + "urllib3", + "virtualenv", + "w3lib", + "zope-interface", + ), ), ) @@ -97,7 +184,7 @@ def test_pip_interop_windows(): prefixdata.load() records = prefixdata._load_site_packages() record_names = tuple(sorted(records.keys())) - print('RECORDS', record_names) + print("RECORDS", record_names) assert len(record_names), len(expected_output) _print_output(expected_output, record_names) for record_name in record_names: @@ -110,26 +197,114 @@ def test_pip_interop_windows(): def test_pip_interop_osx(): test_cases = ( - (PATH_TEST_ENV_1, - ('asn1crypto', 'babel', 'backports-functools-lru-cache', 'cffi', 'chardet', - 'cheroot', 'cherrypy', 'configparser', 'cryptography', 'cssselect', 'dask', - 'django', 'django-phonenumber-field', 'django-twilio', 'entrypoints', - 'enum34', 'h5py', 'idna', 'ipaddress', 'jaraco-functools', 'lxml', - 'more-itertools', 'numpy', 'parsel', 'phonenumberslite', 'pip', 'pluggy', - 'portend', 'py', 'pycparser', 'pyjwt', 'pyopenssl', 'pytz', 'queuelib', - 'requests', 'scrapy', 'service-identity', 'six', 'tempora', 'tox', 'twisted', - 'urllib3', 'virtualenv', 'w3lib') + ( + PATH_TEST_ENV_1, + ( + "asn1crypto", + "babel", + "backports-functools-lru-cache", + "cffi", + "chardet", + "cheroot", + "cherrypy", + "configparser", + "cryptography", + "cssselect", + "dask", + "django", + "django-phonenumber-field", + "django-twilio", + "entrypoints", + "enum34", + "h5py", + "idna", + "ipaddress", + "jaraco-functools", + "lxml", + "more-itertools", + "numpy", + "parsel", + "phonenumberslite", + "pip", + "pluggy", + "portend", + "py", + "pycparser", + "pyjwt", + "pyopenssl", + "pytz", + "queuelib", + "requests", + "scrapy", + "service-identity", + "six", + "tempora", + "tox", + "twisted", + "urllib3", + "virtualenv", + "w3lib", + ), ), - (PATH_TEST_ENV_2, - ('asn1crypto', 'attrs', 'automat', 'babel', 'backports-functools-lru-cache', - 'cffi', 'chardet', 'cheroot', 'cherrypy', 'constantly', 'cryptography', - 'cssselect', 'dask', 'django', 'django-phonenumber-field', 'django-twilio', - 'entrypoints', 'h5py', 'hdf5storage', 'hyperlink', 'idna', 'incremental', - 'jaraco-functools', 'keyring', 'lxml', 'more-itertools', 'numpy', 'parsel', - 'phonenumberslite', 'pip', 'pluggy', 'portend', 'py', 'pyasn1', 'pyasn1-modules', - 'pycparser', 'pydispatcher', 'pyhamcrest', 'pyjwt', 'pyopenssl', 'pysocks', 'pytz', - 'queuelib', 'requests', 'scrapy', 'service-identity', 'six', 'tempora', 'tox', - 'twilio', 'twisted', 'urllib3', 'virtualenv', 'w3lib', 'zope-interface') + ( + PATH_TEST_ENV_2, + ( + "asn1crypto", + "attrs", + "automat", + "babel", + "backports-functools-lru-cache", + "cffi", + "chardet", + "cheroot", + 
"cherrypy", + "constantly", + "cryptography", + "cssselect", + "dask", + "django", + "django-phonenumber-field", + "django-twilio", + "entrypoints", + "h5py", + "hdf5storage", + "hyperlink", + "idna", + "incremental", + "jaraco-functools", + "keyring", + "lxml", + "more-itertools", + "numpy", + "parsel", + "phonenumberslite", + "pip", + "pluggy", + "portend", + "py", + "pyasn1", + "pyasn1-modules", + "pycparser", + "pydispatcher", + "pyhamcrest", + "pyjwt", + "pyopenssl", + "pysocks", + "pytz", + "queuelib", + "requests", + "scrapy", + "service-identity", + "six", + "tempora", + "tox", + "twilio", + "twisted", + "urllib3", + "virtualenv", + "w3lib", + "zope-interface", + ), ), ) @@ -140,7 +315,7 @@ def test_pip_interop_osx(): prefixdata.load() records = prefixdata._load_site_packages() record_names = tuple(sorted(records.keys())) - print('RECORDS', record_names) + print("RECORDS", record_names) assert len(record_names), len(expected_output) _print_output(expected_output, record_names) for record_name in record_names: @@ -153,19 +328,19 @@ def test_pip_interop_osx(): def test_get_conda_anchor_files_and_records(): valid_tests = [ - 'v/site-packages/spam.egg-info/PKG-INFO', - 'v/site-packages/foo.dist-info/RECORD', - 'v/site-packages/bar.egg-info', + "v/site-packages/spam.egg-info/PKG-INFO", + "v/site-packages/foo.dist-info/RECORD", + "v/site-packages/bar.egg-info", ] invalid_tests = [ - 'v/site-packages/valid-package/_vendor/invalid-now.egg-info/PKG-INFO', - 'i/site-packages/stuff.egg-link', - 'i/spam.egg-info/PKG-INFO', - 'i/foo.dist-info/RECORD', - 'i/bar.egg-info', - 'i/site-packages/spam', - 'i/site-packages/foo', - 'i/site-packages/bar', + "v/site-packages/valid-package/_vendor/invalid-now.egg-info/PKG-INFO", + "i/site-packages/stuff.egg-link", + "i/spam.egg-info/PKG-INFO", + "i/foo.dist-info/RECORD", + "i/bar.egg-info", + "i/site-packages/spam", + "i/site-packages/foo", + "i/site-packages/bar", ] tests = valid_tests + invalid_tests records = [] @@ -196,16 +371,15 @@ def test_corrupt_json_conda_meta_json(): class PrefixDatarUnitTests(TestCase): - def setUp(self): tempdirdir = gettempdir() dirname = str(uuid4())[:8] self.prefix = join(tempdirdir, dirname) mkdir_p(self.prefix) assert isdir(self.prefix) - mkdir_p(join(self.prefix, 'conda-meta')) + mkdir_p(join(self.prefix, "conda-meta")) activate_env_vars = join(self.prefix, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) self.pd = PrefixData(self.prefix) @@ -214,11 +388,7 @@ def tearDown(self): assert not lexists(self.prefix) def test_get_environment_env_vars(self): - ex_env_vars = { - "ENV_ONE": "one", - "ENV_TWO": "you", - "ENV_THREE": "me" - } + ex_env_vars = {"ENV_ONE": "one", "ENV_TWO": "you", "ENV_THREE": "me"} env_vars = self.pd.get_environment_env_vars() assert ex_env_vars == env_vars @@ -232,13 +402,13 @@ def test_set_unset_environment_env_vars(self): "ENV_ONE": "one", "ENV_TWO": "you", "ENV_THREE": "me", - "WOAH": "dude" + "WOAH": "dude", } - self.pd.set_environment_env_vars({"WOAH":"dude"}) + self.pd.set_environment_env_vars({"WOAH": "dude"}) env_vars = self.pd.get_environment_env_vars() assert env_vars_add == env_vars - self.pd.unset_environment_env_vars(['WOAH']) + self.pd.unset_environment_env_vars(["WOAH"]) env_vars = self.pd.get_environment_env_vars() assert env_vars_one == env_vars @@ -248,6 +418,6 @@ def test_set_unset_environment_env_vars_no_exist(self): "ENV_TWO": "you", "ENV_THREE": "me", } - self.pd.unset_environment_env_vars(['WOAH']) + 
self.pd.unset_environment_env_vars(["WOAH"]) env_vars = self.pd.get_environment_env_vars() assert env_vars_one == env_vars diff --git a/tests/core/test_solve.py b/tests/core/test_solve.py index 7f855b73446..efb839a7749 100644 --- a/tests/core/test_solve.py +++ b/tests/core/test_solve.py @@ -1,98 +1,134 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - -from pprint import pprint +import copy import platform import sys -import copy +from pprint import pprint from unittest.mock import Mock import pytest +from conda._vendor.cpuinfo import get_cpu_info from conda.auxlib.ish import dals -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context from conda.common.compat import on_linux from conda.common.io import env_var, env_vars from conda.core.solve import DepsModifier, UpdateModifier -from conda.exceptions import UnsatisfiableError, SpecsConfigurationConflictError +from conda.exceptions import SpecsConfigurationConflictError, UnsatisfiableError from conda.models.channel import Channel -from conda.models.records import PrefixRecord from conda.models.enums import PackageType +from conda.models.records import PrefixRecord from conda.resolve import MatchSpec -from conda.testing.helpers import add_subdir_to_iter, get_solver, get_solver_2, get_solver_4, \ - get_solver_aggregate_1, get_solver_aggregate_2, get_solver_cuda, get_solver_must_unfreeze, \ - convert_to_dist_str, CHANNEL_DIR -from conda._vendor.cpuinfo import get_cpu_info +from conda.testing.helpers import ( + CHANNEL_DIR, + add_subdir_to_iter, + convert_to_dist_str, + get_solver, + get_solver_2, + get_solver_4, + get_solver_aggregate_1, + get_solver_aggregate_2, + get_solver_cuda, + get_solver_must_unfreeze, +) def test_solve_1(tmpdir): - specs = MatchSpec("numpy"), + specs = (MatchSpec("numpy"),) with get_solver(tmpdir, specs) as solver: final_state = solver.solve_final_state() # print(convert_to_dist_str(final_state)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-3.3.2-0', - 'channel-1::numpy-1.7.1-py33_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-3.3.2-0", + "channel-1::numpy-1.7.1-py33_0", + ) + ) assert convert_to_dist_str(final_state) == order - specs_to_add = MatchSpec("python=2"), - with get_solver(tmpdir, specs_to_add=specs_to_add, - prefix_records=final_state, history_specs=specs) as solver: + specs_to_add = (MatchSpec("python=2"),) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state, + history_specs=specs, + ) as solver: final_state = solver.solve_final_state() # print(convert_to_dist_str(final_state)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::numpy-1.7.1-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + 
"channel-1::python-2.7.5-0", + "channel-1::numpy-1.7.1-py27_0", + ) + ) assert convert_to_dist_str(final_state) == order def test_solve_2(tmpdir): - specs = MatchSpec("numpy"), + specs = (MatchSpec("numpy"),) with get_solver_aggregate_1(tmpdir, specs) as solver: final_state = solver.solve_final_state() # print(convert_to_dist_str(final_state)) - order = add_subdir_to_iter(( - 'channel-2::mkl-2017.0.3-0', - 'channel-2::openssl-1.0.2l-0', - 'channel-2::readline-6.2-2', - 'channel-2::sqlite-3.13.0-0', - 'channel-2::tk-8.5.18-0', - 'channel-2::xz-5.2.3-0', - 'channel-2::zlib-1.2.11-0', - 'channel-2::python-3.6.2-0', - 'channel-2::numpy-1.13.1-py36_0' - )) + order = add_subdir_to_iter( + ( + "channel-2::mkl-2017.0.3-0", + "channel-2::openssl-1.0.2l-0", + "channel-2::readline-6.2-2", + "channel-2::sqlite-3.13.0-0", + "channel-2::tk-8.5.18-0", + "channel-2::xz-5.2.3-0", + "channel-2::zlib-1.2.11-0", + "channel-2::python-3.6.2-0", + "channel-2::numpy-1.13.1-py36_0", + ) + ) assert convert_to_dist_str(final_state) == order - specs_to_add = MatchSpec("channel-4::numpy"), - with get_solver_aggregate_1(tmpdir, specs_to_add=specs_to_add, - prefix_records=final_state, history_specs=specs) as solver: + specs_to_add = (MatchSpec("channel-4::numpy"),) + with get_solver_aggregate_1( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state, + history_specs=specs, + ) as solver: solver.solve_final_state() - extra_prec = PrefixRecord(_hash=5842798532132402024, name='mkl', version='2017.0.3', - build='0', build_number=0, channel=Channel("channel-2/osx-64"), - subdir='osx-64', fn='mkl-2017.0.3-0.tar.bz2', - md5='76cfa5d21e73db338ffccdbe0af8a727', - url='https://conda.anaconda.org/channel-2/osx-64/mkl-2017.0.3-0.tar.bz2', - arch='x86_64', platform='darwin', depends=(), constrains=(), - track_features=(), features=(), license='proprietary - Intel', - license_family='Proprietary', timestamp=0, date='2017-06-26', size=135839394) + extra_prec = PrefixRecord( + _hash=5842798532132402024, + name="mkl", + version="2017.0.3", + build="0", + build_number=0, + channel=Channel("channel-2/osx-64"), + subdir="osx-64", + fn="mkl-2017.0.3-0.tar.bz2", + md5="76cfa5d21e73db338ffccdbe0af8a727", + url="https://conda.anaconda.org/channel-2/osx-64/mkl-2017.0.3-0.tar.bz2", + arch="x86_64", + platform="darwin", + depends=(), + constrains=(), + track_features=(), + features=(), + license="proprietary - Intel", + license_family="Proprietary", + timestamp=0, + date="2017-06-26", + size=135839394, + ) solver_ssc = copy.copy(solver.ssc) ssc = solver.ssc @@ -108,37 +144,42 @@ def test_solve_2(tmpdir): solver._run_sat = Mock(return_value=ssc) # Give solver the modified ssc solver.ssc = solver_ssc - final_state = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_ALL) + final_state = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_ALL + ) prec_names = [_.name for _ in final_state] assert len(prec_names) == len(set(prec_names)) def test_virtual_package_solver(tmpdir, clear_cuda_version): - specs = MatchSpec("cudatoolkit"), + specs = (MatchSpec("cudatoolkit"),) - with env_var('CONDA_OVERRIDE_CUDA', '10.0'): + with env_var("CONDA_OVERRIDE_CUDA", "10.0"): with get_solver_cuda(tmpdir, specs) as solver: _ = solver.solve_final_state() ssc = solver.ssc # Check the cuda virtual package is included in the solver - assert '__cuda' in ssc.specs_map.keys() + assert "__cuda" in ssc.specs_map.keys() # Check that the environment is consistent after installing a # package which *depends* on a virtual package for pkgs in 
ssc.solution_precs:
-            if pkgs.name == 'cudatoolkit':
+            if pkgs.name == "cudatoolkit":
                 # make sure this package depends on the __cuda virtual
                 # package as a dependency since this is a requirement of the test
-                assert '__cuda' in pkgs.depends[0]
+                assert "__cuda" in pkgs.depends[0]

         assert ssc.r.bad_installed(ssc.solution_precs, ())[1] is None


 def test_solve_msgs_exclude_vp(tmpdir, clear_cuda_version):
     # Solver hints should exclude virtual packages that are not dependencies
-    specs = MatchSpec("python =2.7.5"), MatchSpec("readline =5.0"),
+    specs = (
+        MatchSpec("python =2.7.5"),
+        MatchSpec("readline =5.0"),
+    )

-    with env_var('CONDA_OVERRIDE_CUDA', '10.0'):
+    with env_var("CONDA_OVERRIDE_CUDA", "10.0"):
         with get_solver_cuda(tmpdir, specs) as solver:
             with pytest.raises(UnsatisfiableError) as exc:
                 solver.solve_final_state()
@@ -147,36 +188,32 @@ def test_solve_msgs_exclude_vp(tmpdir, clear_cuda_version):


 def test_cuda_1(tmpdir, clear_cuda_version):
-    specs = MatchSpec("cudatoolkit"),
+    specs = (MatchSpec("cudatoolkit"),)

-    with env_var('CONDA_OVERRIDE_CUDA', '9.2'):
+    with env_var("CONDA_OVERRIDE_CUDA", "9.2"):
         with get_solver_cuda(tmpdir, specs) as solver:
             final_state = solver.solve_final_state()
             # print(convert_to_dist_str(final_state))
-            order = add_subdir_to_iter((
-                'channel-1::cudatoolkit-9.0-0',
-            ))
+            order = add_subdir_to_iter(("channel-1::cudatoolkit-9.0-0",))
             assert convert_to_dist_str(final_state) == order


 def test_cuda_2(tmpdir, clear_cuda_version):
-    specs = MatchSpec("cudatoolkit"),
+    specs = (MatchSpec("cudatoolkit"),)

-    with env_var('CONDA_OVERRIDE_CUDA', '10.0'):
+    with env_var("CONDA_OVERRIDE_CUDA", "10.0"):
         with get_solver_cuda(tmpdir, specs) as solver:
             final_state = solver.solve_final_state()
             # print(convert_to_dist_str(final_state))
-            order = add_subdir_to_iter((
-                'channel-1::cudatoolkit-10.0-0',
-            ))
+            order = add_subdir_to_iter(("channel-1::cudatoolkit-10.0-0",))
             assert convert_to_dist_str(final_state) == order


 def test_cuda_fail_1(tmpdir, clear_cuda_version):
-    specs = MatchSpec("cudatoolkit"),
+    specs = (MatchSpec("cudatoolkit"),)

     # No cudatoolkit in index for CUDA 8.0
-    with env_var('CONDA_OVERRIDE_CUDA', '8.0'):
+    with env_var("CONDA_OVERRIDE_CUDA", "8.0"):
         with get_solver_cuda(tmpdir, specs) as solver:
             with pytest.raises(UnsatisfiableError) as exc:
                 solver.solve_final_state()
@@ -196,230 +233,262 @@ def test_cuda_fail_1(tmpdir, clear_cuda_version):
     else:
         plat = "linux-64"

-    assert str(exc.value).strip() == dals("""The following specifications were found to be incompatible with your system:
+    assert str(exc.value).strip() == dals(
+        """The following specifications were found to be incompatible with your system:

  - feature:/{}::__cuda==8.0=0
  - cudatoolkit -> __cuda[version='>=10.0|>=9.0']

-Your installed version is: 8.0""".format(plat))
+Your installed version is: 8.0""".format(
+            plat
+        )
+    )


 def test_cuda_fail_2(tmpdir, clear_cuda_version):
-    specs = MatchSpec("cudatoolkit"),
+    specs = (MatchSpec("cudatoolkit"),)

     # No CUDA on system
-    with env_var('CONDA_OVERRIDE_CUDA', ''):
+    with env_var("CONDA_OVERRIDE_CUDA", ""):
         with get_solver_cuda(tmpdir, specs) as solver:
             with pytest.raises(UnsatisfiableError) as exc:
                 solver.solve_final_state()

-    assert str(exc.value).strip() == dals("""The following specifications were found to be incompatible with your system:
+    assert str(exc.value).strip() == dals(
+        """The following specifications were found to be incompatible with your system:

  - cudatoolkit -> __cuda[version='>=10.0|>=9.0']

-Your installed version is: not 
available""") +Your installed version is: not available""" + ) def test_cuda_constrain_absent(tmpdir, clear_cuda_version): - specs = MatchSpec("cuda-constrain"), + specs = (MatchSpec("cuda-constrain"),) - with env_var('CONDA_OVERRIDE_CUDA', ''): + with env_var("CONDA_OVERRIDE_CUDA", ""): with get_solver_cuda(tmpdir, specs) as solver: final_state = solver.solve_final_state() # print(convert_to_dist_str(final_state)) - order = add_subdir_to_iter(( - 'channel-1::cuda-constrain-11.0-0', - )) + order = add_subdir_to_iter(("channel-1::cuda-constrain-11.0-0",)) assert convert_to_dist_str(final_state) == order @pytest.mark.skip(reason="known broken; fix to be implemented") def test_cuda_constrain_sat(tmpdir, clear_cuda_version): - specs = MatchSpec("cuda-constrain"), + specs = (MatchSpec("cuda-constrain"),) - with env_var('CONDA_OVERRIDE_CUDA', '10.0'): + with env_var("CONDA_OVERRIDE_CUDA", "10.0"): with get_solver_cuda(tmpdir, specs) as solver: final_state = solver.solve_final_state() # print(convert_to_dist_str(final_state)) - order = add_subdir_to_iter(( - 'channel-1::cuda-constrain-10.0-0', - )) + order = add_subdir_to_iter(("channel-1::cuda-constrain-10.0-0",)) assert convert_to_dist_str(final_state) == order @pytest.mark.skip(reason="known broken; fix to be implemented") def test_cuda_constrain_unsat(tmpdir, clear_cuda_version): - specs = MatchSpec("cuda-constrain"), + specs = (MatchSpec("cuda-constrain"),) # No cudatoolkit in index for CUDA 8.0 - with env_var('CONDA_OVERRIDE_CUDA', '8.0'): + with env_var("CONDA_OVERRIDE_CUDA", "8.0"): with get_solver_cuda(tmpdir, specs) as solver: with pytest.raises(UnsatisfiableError) as exc: solver.solve_final_state() - assert str(exc.value).strip() == dals("""The following specifications were found to be incompatible with your system: + assert str(exc.value).strip() == dals( + """The following specifications were found to be incompatible with your system: - feature:|@/{}::__cuda==8.0=0 - __cuda[version='>=10.0'] -> feature:/linux-64::__cuda==8.0=0 -Your installed version is: 8.0""".format(context.subdir)) +Your installed version is: 8.0""".format( + context.subdir + ) + ) @pytest.mark.skipif(not on_linux, reason="linux-only test") def test_cuda_glibc_sat(tmpdir, clear_cuda_version): - specs = MatchSpec("cuda-glibc"), + specs = (MatchSpec("cuda-glibc"),) - with env_var('CONDA_OVERRIDE_CUDA', '10.0'), env_var('CONDA_OVERRIDE_GLIBC', '2.23'): + with env_var("CONDA_OVERRIDE_CUDA", "10.0"), env_var( + "CONDA_OVERRIDE_GLIBC", "2.23" + ): with get_solver_cuda(tmpdir, specs) as solver: final_state = solver.solve_final_state() # print(convert_to_dist_str(final_state)) - order = add_subdir_to_iter(( - 'channel-1::cuda-glibc-10.0-0', - )) + order = add_subdir_to_iter(("channel-1::cuda-glibc-10.0-0",)) assert convert_to_dist_str(final_state) == order @pytest.mark.skip(reason="known broken; fix to be implemented") @pytest.mark.skipif(not on_linux, reason="linux-only test") def test_cuda_glibc_unsat_depend(tmpdir, clear_cuda_version): - specs = MatchSpec("cuda-glibc"), + specs = (MatchSpec("cuda-glibc"),) - with env_var('CONDA_OVERRIDE_CUDA', '8.0'), env_var('CONDA_OVERRIDE_GLIBC', '2.23'): + with env_var("CONDA_OVERRIDE_CUDA", "8.0"), env_var("CONDA_OVERRIDE_GLIBC", "2.23"): with get_solver_cuda(tmpdir, specs) as solver: with pytest.raises(UnsatisfiableError) as exc: solver.solve_final_state() - assert str(exc.value).strip() == dals("""The following specifications were found to be incompatible with your system: + assert str(exc.value).strip() == dals( + """The 
following specifications were found to be incompatible with your system: - feature:|@/{}::__cuda==8.0=0 - __cuda[version='>=10.0'] -> feature:/linux-64::__cuda==8.0=0 -Your installed version is: 8.0""".format(context.subdir)) +Your installed version is: 8.0""".format( + context.subdir + ) + ) @pytest.mark.skip(reason="known broken; fix to be implemented") @pytest.mark.skipif(not on_linux, reason="linux-only test") def test_cuda_glibc_unsat_constrain(tmpdir, clear_cuda_version): - specs = MatchSpec("cuda-glibc"), + specs = (MatchSpec("cuda-glibc"),) - with env_var('CONDA_OVERRIDE_CUDA', '10.0'), env_var('CONDA_OVERRIDE_GLIBC', '2.12'): + with env_var("CONDA_OVERRIDE_CUDA", "10.0"), env_var( + "CONDA_OVERRIDE_GLIBC", "2.12" + ): with get_solver_cuda(tmpdir, specs) as solver: with pytest.raises(UnsatisfiableError): solver.solve_final_state() def test_prune_1(tmpdir): - specs = MatchSpec("numpy=1.6"), MatchSpec("python=2.7.3"), MatchSpec("accelerate"), + specs = ( + MatchSpec("numpy=1.6"), + MatchSpec("python=2.7.3"), + MatchSpec("accelerate"), + ) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::libnvvm-1.0-p0', - 'channel-1::mkl-rt-11.0-p0', - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.7.3-7', - 'channel-1::bitarray-0.8.1-py27_0', - 'channel-1::llvmpy-0.11.2-py27_0', - 'channel-1::meta-0.4.2.dev-py27_0', - 'channel-1::mkl-service-1.0.0-py27_p0', - 'channel-1::numpy-1.6.2-py27_p4', - 'channel-1::numba-0.8.1-np16py27_0', - 'channel-1::numexpr-2.1-np16py27_p0', - 'channel-1::scipy-0.12.0-np16py27_p0', - 'channel-1::numbapro-0.11.0-np16py27_p0', - 'channel-1::scikit-learn-0.13.1-np16py27_p0', - 'channel-1::mkl-11.0-np16py27_p0', - 'channel-1::accelerate-1.1.0-np16py27_p0', - )) + order = add_subdir_to_iter( + ( + "channel-1::libnvvm-1.0-p0", + "channel-1::mkl-rt-11.0-p0", + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.7.3-7", + "channel-1::bitarray-0.8.1-py27_0", + "channel-1::llvmpy-0.11.2-py27_0", + "channel-1::meta-0.4.2.dev-py27_0", + "channel-1::mkl-service-1.0.0-py27_p0", + "channel-1::numpy-1.6.2-py27_p4", + "channel-1::numba-0.8.1-np16py27_0", + "channel-1::numexpr-2.1-np16py27_p0", + "channel-1::scipy-0.12.0-np16py27_p0", + "channel-1::numbapro-0.11.0-np16py27_p0", + "channel-1::scikit-learn-0.13.1-np16py27_p0", + "channel-1::mkl-11.0-np16py27_p0", + "channel-1::accelerate-1.1.0-np16py27_p0", + ) + ) assert convert_to_dist_str(final_state_1) == order - specs_to_remove = MatchSpec("numbapro"), - with get_solver(tmpdir, specs_to_remove=specs_to_remove, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_remove = (MatchSpec("numbapro"),) + with get_solver( + tmpdir, + specs_to_remove=specs_to_remove, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) - unlink_order = add_subdir_to_iter(( - 'channel-1::accelerate-1.1.0-np16py27_p0', - 'channel-1::mkl-11.0-np16py27_p0', - 'channel-1::scikit-learn-0.13.1-np16py27_p0', - 
'channel-1::numbapro-0.11.0-np16py27_p0',
-            'channel-1::scipy-0.12.0-np16py27_p0',
-            'channel-1::numexpr-2.1-np16py27_p0',
-            'channel-1::numba-0.8.1-np16py27_0',
-            'channel-1::numpy-1.6.2-py27_p4',
-            'channel-1::mkl-service-1.0.0-py27_p0',
-            'channel-1::meta-0.4.2.dev-py27_0',
-            'channel-1::llvmpy-0.11.2-py27_0',
-            'channel-1::bitarray-0.8.1-py27_0',
-            'channel-1::llvm-3.2-0',
-            'channel-1::mkl-rt-11.0-p0',
-            'channel-1::libnvvm-1.0-p0',
-        ))
-        link_order = add_subdir_to_iter((
-            'channel-1::numpy-1.6.2-py27_4',
-        ))
+        unlink_order = add_subdir_to_iter(
+            (
+                "channel-1::accelerate-1.1.0-np16py27_p0",
+                "channel-1::mkl-11.0-np16py27_p0",
+                "channel-1::scikit-learn-0.13.1-np16py27_p0",
+                "channel-1::numbapro-0.11.0-np16py27_p0",
+                "channel-1::scipy-0.12.0-np16py27_p0",
+                "channel-1::numexpr-2.1-np16py27_p0",
+                "channel-1::numba-0.8.1-np16py27_0",
+                "channel-1::numpy-1.6.2-py27_p4",
+                "channel-1::mkl-service-1.0.0-py27_p0",
+                "channel-1::meta-0.4.2.dev-py27_0",
+                "channel-1::llvmpy-0.11.2-py27_0",
+                "channel-1::bitarray-0.8.1-py27_0",
+                "channel-1::llvm-3.2-0",
+                "channel-1::mkl-rt-11.0-p0",
+                "channel-1::libnvvm-1.0-p0",
+            )
+        )
+        link_order = add_subdir_to_iter(("channel-1::numpy-1.6.2-py27_4",))
         assert convert_to_dist_str(unlink_precs) == unlink_order
         assert convert_to_dist_str(link_precs) == link_order


 def test_force_remove_1(tmpdir):
-    specs = MatchSpec("numpy[version=*,build=*py27*]"),
+    specs = (MatchSpec("numpy[version=*,build=*py27*]"),)
     with get_solver(tmpdir, specs) as solver:
         final_state_1 = solver.solve_final_state()
         # PrefixDag(final_state_1, specs).open_url()
         print(convert_to_dist_str(final_state_1))
-        order = add_subdir_to_iter((
-            'channel-1::openssl-1.0.1c-0',
-            'channel-1::readline-6.2-0',
-            'channel-1::sqlite-3.7.13-0',
-            'channel-1::system-5.8-1',
-            'channel-1::tk-8.5.13-0',
-            'channel-1::zlib-1.2.7-0',
-            'channel-1::python-2.7.5-0',
-            'channel-1::numpy-1.7.1-py27_0',
-        ))
+        order = add_subdir_to_iter(
+            (
+                "channel-1::openssl-1.0.1c-0",
+                "channel-1::readline-6.2-0",
+                "channel-1::sqlite-3.7.13-0",
+                "channel-1::system-5.8-1",
+                "channel-1::tk-8.5.13-0",
+                "channel-1::zlib-1.2.7-0",
+                "channel-1::python-2.7.5-0",
+                "channel-1::numpy-1.7.1-py27_0",
+            )
+        )
        assert convert_to_dist_str(final_state_1) == order

    # without force_remove, taking out python takes out everything that depends on it, too,
    # so numpy goes away. All of python's deps are also pruned.
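(Aside, not part of the patch: most hunks in this file repeat one black rewrite, the implicit one-element tuple. A minimal sketch of the before/after, using a spec literal that appears in the surrounding hunks:

    from conda.resolve import MatchSpec

    # before black: the bare trailing comma builds a 1-tuple, but the line
    # reads like a plain assignment
    specs = MatchSpec("python"),

    # after black: explicit parentheses make the tuple unmistakable;
    # the runtime value is identical
    specs = (MatchSpec("python"),)

The same "magic trailing comma" rule is why calls such as get_solver(...) and add_subdir_to_iter(...) are exploded to one argument per line throughout this patch once a trailing comma is present.)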
- specs_to_remove = MatchSpec("python"), - with get_solver(tmpdir, specs_to_remove=specs_to_remove, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_remove = (MatchSpec("python"),) + with get_solver( + tmpdir, + specs_to_remove=specs_to_remove, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) # openssl remains because it is in the aggressive_update_packages set, # but everything else gets removed - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - )) + order = add_subdir_to_iter(("channel-1::openssl-1.0.1c-0",)) assert convert_to_dist_str(final_state_2) == order # with force remove, we remove only the explicit specs that we provide # this leaves an inconsistent env - specs_to_remove = MatchSpec("python"), - with get_solver(tmpdir, specs_to_remove=specs_to_remove, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_remove = (MatchSpec("python"),) + with get_solver( + tmpdir, + specs_to_remove=specs_to_remove, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: final_state_2 = solver.solve_final_state(force_remove=True) # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::numpy-1.7.1-py27_0', - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - )) + order = add_subdir_to_iter( + ( + "channel-1::numpy-1.7.1-py27_0", + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + ) + ) assert convert_to_dist_str(final_state_2) == order # re-solving restores order @@ -427,224 +496,269 @@ def test_force_remove_1(tmpdir): final_state_3 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_3)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::numpy-1.7.1-py27_0' - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::numpy-1.7.1-py27_0", + ) + ) assert convert_to_dist_str(final_state_3) == order def test_no_deps_1(tmpdir): - specs = MatchSpec("python=2"), + specs = (MatchSpec("python=2"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + ) + ) assert 
convert_to_dist_str(final_state_1) == order - specs_to_add = MatchSpec("numba"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("numba"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.7.5-0', - 'channel-1::llvmpy-0.11.2-py27_0', - 'channel-1::meta-0.4.2.dev-py27_0', - 'channel-1::numpy-1.7.1-py27_0', - 'channel-1::numba-0.8.1-np17py27_0' - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.7.5-0", + "channel-1::llvmpy-0.11.2-py27_0", + "channel-1::meta-0.4.2.dev-py27_0", + "channel-1::numpy-1.7.1-py27_0", + "channel-1::numba-0.8.1-np17py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order - specs_to_add = MatchSpec("numba"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: - final_state_2 = solver.solve_final_state(deps_modifier='NO_DEPS') + specs_to_add = (MatchSpec("numba"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: + final_state_2 = solver.solve_final_state(deps_modifier="NO_DEPS") # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::numba-0.8.1-np17py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::numba-0.8.1-np17py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order def test_only_deps_1(tmpdir): - specs = MatchSpec("numba[version=*,build=*py27*]"), + specs = (MatchSpec("numba[version=*,build=*py27*]"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state(deps_modifier=DepsModifier.ONLY_DEPS) # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.7.5-0', - 'channel-1::llvmpy-0.11.2-py27_0', - 'channel-1::meta-0.4.2.dev-py27_0', - 'channel-1::numpy-1.7.1-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.7.5-0", + "channel-1::llvmpy-0.11.2-py27_0", + 
"channel-1::meta-0.4.2.dev-py27_0", + "channel-1::numpy-1.7.1-py27_0", + ) + ) assert convert_to_dist_str(final_state_1) == order def test_only_deps_2(tmpdir): - specs = MatchSpec("numpy=1.5"), MatchSpec("python=2.7.3"), + specs = ( + MatchSpec("numpy=1.5"), + MatchSpec("python=2.7.3"), + ) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.3-7', - 'channel-1::numpy-1.5.1-py27_4', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.3-7", + "channel-1::numpy-1.5.1-py27_4", + ) + ) assert convert_to_dist_str(final_state_1) == order - specs_to_add = MatchSpec("numba=0.5"), + specs_to_add = (MatchSpec("numba=0.5"),) with get_solver(tmpdir, specs_to_add) as solver: final_state_2 = solver.solve_final_state(deps_modifier=DepsModifier.ONLY_DEPS) # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.7.5-0', - 'channel-1::llvmpy-0.10.0-py27_0', - 'channel-1::meta-0.4.2.dev-py27_0', - 'channel-1::nose-1.3.0-py27_0', - 'channel-1::numpy-1.7.1-py27_0', - # 'channel-1::numba-0.5.0-np17py27_0', # not in the order because only_deps - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.7.5-0", + "channel-1::llvmpy-0.10.0-py27_0", + "channel-1::meta-0.4.2.dev-py27_0", + "channel-1::nose-1.3.0-py27_0", + "channel-1::numpy-1.7.1-py27_0", + # 'channel-1::numba-0.5.0-np17py27_0', # not in the order because only_deps + ) + ) assert convert_to_dist_str(final_state_2) == order # fails because numpy=1.5 is in our history as an explicit spec - specs_to_add = MatchSpec("numba=0.5"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("numba=0.5"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: with pytest.raises(UnsatisfiableError): - final_state_2 = solver.solve_final_state(deps_modifier=DepsModifier.ONLY_DEPS) + final_state_2 = solver.solve_final_state( + deps_modifier=DepsModifier.ONLY_DEPS + ) specs_to_add = MatchSpec("numba=0.5"), MatchSpec("numpy") - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state(deps_modifier=DepsModifier.ONLY_DEPS) # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 
'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.7.3-7', - 'channel-1::llvmpy-0.10.0-py27_0', - 'channel-1::meta-0.4.2.dev-py27_0', - 'channel-1::nose-1.3.0-py27_0', - 'channel-1::numpy-1.7.1-py27_0', - # 'channel-1::numba-0.5.0-np17py27_0', # not in the order because only_deps - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.7.3-7", + "channel-1::llvmpy-0.10.0-py27_0", + "channel-1::meta-0.4.2.dev-py27_0", + "channel-1::nose-1.3.0-py27_0", + "channel-1::numpy-1.7.1-py27_0", + # 'channel-1::numba-0.5.0-np17py27_0', # not in the order because only_deps + ) + ) assert convert_to_dist_str(final_state_2) == order def test_update_all_1(tmpdir): - specs = MatchSpec("numpy=1.5"), MatchSpec("python=2.6"), MatchSpec("system[version=*,build_number=0]") + specs = ( + MatchSpec("numpy=1.5"), + MatchSpec("python=2.6"), + MatchSpec("system[version=*,build_number=0]"), + ) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-0', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.6.8-6', - 'channel-1::numpy-1.5.1-py26_4', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-0", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.6.8-6", + "channel-1::numpy-1.5.1-py26_4", + ) + ) assert convert_to_dist_str(final_state_1) == order specs_to_add = MatchSpec("numba=0.6"), MatchSpec("numpy") - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-0', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.6.8-6', - 'channel-1::llvmpy-0.10.2-py26_0', - 'channel-1::nose-1.3.0-py26_0', - 'channel-1::numpy-1.7.1-py26_0', - 'channel-1::numba-0.6.0-np17py26_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-0", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.6.8-6", + "channel-1::llvmpy-0.10.2-py26_0", + "channel-1::nose-1.3.0-py26_0", + "channel-1::numpy-1.7.1-py26_0", + "channel-1::numba-0.6.0-np17py26_0", + ) + ) assert convert_to_dist_str(final_state_2) == order - specs_to_add = MatchSpec("numba=0.6"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: - final_state_2 = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_ALL) + specs_to_add = 
(MatchSpec("numba=0.6"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: + final_state_2 = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_ALL + ) # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.6.8-6', # stick with python=2.6 even though UPDATE_ALL - 'channel-1::llvmpy-0.10.2-py26_0', - 'channel-1::nose-1.3.0-py26_0', - 'channel-1::numpy-1.7.1-py26_0', - 'channel-1::numba-0.6.0-np17py26_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.6.8-6", # stick with python=2.6 even though UPDATE_ALL + "channel-1::llvmpy-0.10.2-py26_0", + "channel-1::nose-1.3.0-py26_0", + "channel-1::numpy-1.7.1-py26_0", + "channel-1::numba-0.6.0-np17py26_0", + ) + ) assert convert_to_dist_str(final_state_2) == order @@ -654,21 +768,23 @@ def test_broken_install(tmpdir): final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order_original = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::numpy-1.6.2-py27_4', - 'channel-1::pytz-2013b-py27_0', - 'channel-1::six-1.3.0-py27_0', - 'channel-1::dateutil-2.1-py27_1', - 'channel-1::scipy-0.12.0-np16py27_0', - 'channel-1::pandas-0.11.0-np16py27_1', - )) + order_original = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::numpy-1.6.2-py27_4", + "channel-1::pytz-2013b-py27_0", + "channel-1::six-1.3.0-py27_0", + "channel-1::dateutil-2.1-py27_1", + "channel-1::scipy-0.12.0-np16py27_0", + "channel-1::pandas-0.11.0-np16py27_1", + ) + ) assert convert_to_dist_str(final_state_1) == order_original assert solver._r.environment_is_consistent(final_state_1) @@ -679,225 +795,261 @@ def test_broken_install(tmpdir): final_state_1_modified[7] = numpy_prec assert not solver._r.environment_is_consistent(final_state_1_modified) - specs_to_add = MatchSpec("flask"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1_modified, history_specs=specs) as solver: + specs_to_add = (MatchSpec("flask"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1_modified, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - "channel-1::numpy-1.7.1-py33_p0", - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::jinja2-2.6-py27_0', - 'channel-1::pytz-2013b-py27_0', - 
'channel-1::scipy-0.12.0-np16py27_0', - 'channel-1::six-1.3.0-py27_0', - 'channel-1::werkzeug-0.8.3-py27_0', - 'channel-1::dateutil-2.1-py27_1', - 'channel-1::flask-0.9-py27_0', - 'channel-1::pandas-0.11.0-np16py27_1' - )) + order = add_subdir_to_iter( + ( + "channel-1::numpy-1.7.1-py33_p0", + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::jinja2-2.6-py27_0", + "channel-1::pytz-2013b-py27_0", + "channel-1::scipy-0.12.0-np16py27_0", + "channel-1::six-1.3.0-py27_0", + "channel-1::werkzeug-0.8.3-py27_0", + "channel-1::dateutil-2.1-py27_1", + "channel-1::flask-0.9-py27_0", + "channel-1::pandas-0.11.0-np16py27_1", + ) + ) assert convert_to_dist_str(final_state_2) == order assert not solver._r.environment_is_consistent(final_state_2) # adding numpy spec again snaps the packages back to a consistent state - specs_to_add = MatchSpec("flask"), MatchSpec("numpy 1.6.*"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1_modified, history_specs=specs) as solver: + specs_to_add = ( + MatchSpec("flask"), + MatchSpec("numpy 1.6.*"), + ) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1_modified, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::jinja2-2.6-py27_0', - 'channel-1::numpy-1.6.2-py27_4', - 'channel-1::pytz-2013b-py27_0', - 'channel-1::six-1.3.0-py27_0', - 'channel-1::werkzeug-0.8.3-py27_0', - 'channel-1::dateutil-2.1-py27_1', - 'channel-1::flask-0.9-py27_0', - 'channel-1::scipy-0.12.0-np16py27_0', - 'channel-1::pandas-0.11.0-np16py27_1', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::jinja2-2.6-py27_0", + "channel-1::numpy-1.6.2-py27_4", + "channel-1::pytz-2013b-py27_0", + "channel-1::six-1.3.0-py27_0", + "channel-1::werkzeug-0.8.3-py27_0", + "channel-1::dateutil-2.1-py27_1", + "channel-1::flask-0.9-py27_0", + "channel-1::scipy-0.12.0-np16py27_0", + "channel-1::pandas-0.11.0-np16py27_1", + ) + ) assert convert_to_dist_str(final_state_2) == order assert solver._r.environment_is_consistent(final_state_2) # Add an incompatible pandas; installation should be untouched, then fixed final_state_2_mod = list(final_state_1) - pandas_matcher = MatchSpec('channel-1::pandas==0.11.0=np17py27_1') + pandas_matcher = MatchSpec("channel-1::pandas==0.11.0=np17py27_1") pandas_prec = next(prec for prec in solver._index if pandas_matcher.match(prec)) final_state_2_mod[12] = pandas_prec assert not solver._r.environment_is_consistent(final_state_2_mod) def test_conda_downgrade(tmpdir): - specs = MatchSpec("conda-build"), - with env_var("CONDA_CHANNEL_PRIORITY", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol): + specs = (MatchSpec("conda-build"),) + with env_var( + "CONDA_CHANNEL_PRIORITY", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with get_solver_aggregate_1(tmpdir, specs) as solver: final_state_1 
= solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-4::ca-certificates-2018.03.07-0', - 'channel-2::conda-env-2.6.0-0', - 'channel-2::libffi-3.2.1-1', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-2::zlib-1.2.11-0', - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::patchelf-0.9-hf484d3e_2', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::xz-5.2.4-h14c3975_4', - 'channel-4::yaml-0.1.7-had09818_2', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.24.0-h84994c4_0', - 'channel-4::python-3.7.0-hc3d631a_0', - 'channel-4::asn1crypto-0.24.0-py37_0', - 'channel-4::beautifulsoup4-4.6.3-py37_0', - 'channel-4::certifi-2018.8.13-py37_0', - 'channel-4::chardet-3.0.4-py37_1', - 'channel-4::cryptography-vectors-2.3-py37_0', - 'channel-4::filelock-3.0.4-py37_0', - 'channel-4::glob2-0.6-py37_0', - 'channel-4::idna-2.7-py37_0', - 'channel-4::markupsafe-1.0-py37h14c3975_1', - 'channel-4::pkginfo-1.4.2-py37_1', - 'channel-4::psutil-5.4.6-py37h14c3975_0', - 'channel-4::pycosat-0.6.3-py37h14c3975_0', - 'channel-4::pycparser-2.18-py37_1', - 'channel-4::pysocks-1.6.8-py37_0', - 'channel-4::pyyaml-3.13-py37h14c3975_0', - 'channel-4::ruamel_yaml-0.15.46-py37h14c3975_0', - 'channel-4::six-1.11.0-py37_1', - 'channel-4::cffi-1.11.5-py37h9745a5d_0', - 'channel-4::setuptools-40.0.0-py37_0', - 'channel-4::cryptography-2.3-py37hb7f436b_0', - 'channel-4::jinja2-2.10-py37_0', - 'channel-4::pyopenssl-18.0.0-py37_0', - 'channel-4::urllib3-1.23-py37_0', - 'channel-4::requests-2.19.1-py37_0', - 'channel-4::conda-4.5.10-py37_0', - 'channel-4::conda-build-3.12.1-py37_0' - )) + order = add_subdir_to_iter( + ( + "channel-4::ca-certificates-2018.03.07-0", + "channel-2::conda-env-2.6.0-0", + "channel-2::libffi-3.2.1-1", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-2::zlib-1.2.11-0", + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::patchelf-0.9-hf484d3e_2", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::xz-5.2.4-h14c3975_4", + "channel-4::yaml-0.1.7-had09818_2", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.24.0-h84994c4_0", + "channel-4::python-3.7.0-hc3d631a_0", + "channel-4::asn1crypto-0.24.0-py37_0", + "channel-4::beautifulsoup4-4.6.3-py37_0", + "channel-4::certifi-2018.8.13-py37_0", + "channel-4::chardet-3.0.4-py37_1", + "channel-4::cryptography-vectors-2.3-py37_0", + "channel-4::filelock-3.0.4-py37_0", + "channel-4::glob2-0.6-py37_0", + "channel-4::idna-2.7-py37_0", + "channel-4::markupsafe-1.0-py37h14c3975_1", + "channel-4::pkginfo-1.4.2-py37_1", + "channel-4::psutil-5.4.6-py37h14c3975_0", + "channel-4::pycosat-0.6.3-py37h14c3975_0", + "channel-4::pycparser-2.18-py37_1", + "channel-4::pysocks-1.6.8-py37_0", + "channel-4::pyyaml-3.13-py37h14c3975_0", + "channel-4::ruamel_yaml-0.15.46-py37h14c3975_0", + "channel-4::six-1.11.0-py37_1", + "channel-4::cffi-1.11.5-py37h9745a5d_0", + "channel-4::setuptools-40.0.0-py37_0", + "channel-4::cryptography-2.3-py37hb7f436b_0", + "channel-4::jinja2-2.10-py37_0", + "channel-4::pyopenssl-18.0.0-py37_0", + "channel-4::urllib3-1.23-py37_0", + "channel-4::requests-2.19.1-py37_0", + "channel-4::conda-4.5.10-py37_0", + "channel-4::conda-build-3.12.1-py37_0", + ) + ) assert convert_to_dist_str(final_state_1) == 
order - specs_to_add = MatchSpec("itsdangerous"), # MatchSpec("conda"), + specs_to_add = (MatchSpec("itsdangerous"),) # MatchSpec("conda"), saved_sys_prefix = sys.prefix try: sys.prefix = tmpdir.strpath - with get_solver_aggregate_1(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + with get_solver_aggregate_1( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) unlink_order = ( # no conda downgrade ) - link_order = add_subdir_to_iter(( - 'channel-2::itsdangerous-0.24-py_0', - )) + link_order = add_subdir_to_iter(("channel-2::itsdangerous-0.24-py_0",)) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order - specs_to_add = MatchSpec("itsdangerous"), MatchSpec("conda"), - with get_solver_aggregate_1(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_add = ( + MatchSpec("itsdangerous"), + MatchSpec("conda"), + ) + with get_solver_aggregate_1( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order - specs_to_add = MatchSpec("itsdangerous"), MatchSpec("conda<4.4.10"), MatchSpec("python") - with get_solver_aggregate_1(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_add = ( + MatchSpec("itsdangerous"), + MatchSpec("conda<4.4.10"), + MatchSpec("python"), + ) + with get_solver_aggregate_1( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) - unlink_order = add_subdir_to_iter(( - # now conda gets downgraded - 'channel-4::conda-build-3.12.1-py37_0', - 'channel-4::conda-4.5.10-py37_0', - 'channel-4::requests-2.19.1-py37_0', - 'channel-4::urllib3-1.23-py37_0', - 'channel-4::pyopenssl-18.0.0-py37_0', - 'channel-4::jinja2-2.10-py37_0', - 'channel-4::cryptography-2.3-py37hb7f436b_0', - 'channel-4::setuptools-40.0.0-py37_0', - 'channel-4::cffi-1.11.5-py37h9745a5d_0', - 'channel-4::six-1.11.0-py37_1', - 'channel-4::ruamel_yaml-0.15.46-py37h14c3975_0', - 'channel-4::pyyaml-3.13-py37h14c3975_0', - 'channel-4::pysocks-1.6.8-py37_0', - 'channel-4::pycparser-2.18-py37_1', - 'channel-4::pycosat-0.6.3-py37h14c3975_0', - 'channel-4::psutil-5.4.6-py37h14c3975_0', - 'channel-4::pkginfo-1.4.2-py37_1', - 'channel-4::markupsafe-1.0-py37h14c3975_1', - 'channel-4::idna-2.7-py37_0', - 'channel-4::glob2-0.6-py37_0', - 'channel-4::filelock-3.0.4-py37_0', - 'channel-4::cryptography-vectors-2.3-py37_0', - 'channel-4::chardet-3.0.4-py37_1', - 'channel-4::certifi-2018.8.13-py37_0', - 'channel-4::beautifulsoup4-4.6.3-py37_0', - 'channel-4::asn1crypto-0.24.0-py37_0', - 'channel-4::python-3.7.0-hc3d631a_0', - 'channel-4::sqlite-3.24.0-h84994c4_0', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::yaml-0.1.7-had09818_2', - 'channel-4::xz-5.2.4-h14c3975_4', - 'channel-4::tk-8.6.7-hc745277_3', - 
'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::ncurses-6.1-hf484d3e_0', - )) - link_order = add_subdir_to_iter(( - 'channel-2::openssl-1.0.2l-0', - 'channel-2::readline-6.2-2', - 'channel-2::sqlite-3.13.0-0', - 'channel-2::tk-8.5.18-0', - 'channel-2::xz-5.2.3-0', - 'channel-2::yaml-0.1.6-0', - 'channel-2::python-3.6.2-0', - 'channel-2::asn1crypto-0.22.0-py36_0', - 'channel-4::beautifulsoup4-4.6.3-py36_0', - 'channel-2::certifi-2016.2.28-py36_0', - 'channel-4::chardet-3.0.4-py36_1', - 'channel-4::filelock-3.0.4-py36_0', - 'channel-4::glob2-0.6-py36_0', - 'channel-2::idna-2.6-py36_0', - 'channel-2::itsdangerous-0.24-py36_0', - 'channel-2::markupsafe-1.0-py36_0', - 'channel-4::pkginfo-1.4.2-py36_1', - 'channel-2::psutil-5.2.2-py36_0', - 'channel-2::pycosat-0.6.2-py36_0', - 'channel-2::pycparser-2.18-py36_0', - 'channel-2::pyparsing-2.2.0-py36_0', - 'channel-2::pyyaml-3.12-py36_0', - 'channel-2::requests-2.14.2-py36_0', - 'channel-2::ruamel_yaml-0.11.14-py36_1', - 'channel-2::six-1.10.0-py36_0', - 'channel-2::cffi-1.10.0-py36_0', - 'channel-2::packaging-16.8-py36_0', - 'channel-2::setuptools-36.4.0-py36_1', - 'channel-2::cryptography-1.8.1-py36_0', - 'channel-2::jinja2-2.9.6-py36_0', - 'channel-2::pyopenssl-17.0.0-py36_0', - 'channel-2::conda-4.3.30-py36h5d9f9f4_0', - 'channel-4::conda-build-3.12.1-py36_0' - )) + unlink_order = add_subdir_to_iter( + ( + # now conda gets downgraded + "channel-4::conda-build-3.12.1-py37_0", + "channel-4::conda-4.5.10-py37_0", + "channel-4::requests-2.19.1-py37_0", + "channel-4::urllib3-1.23-py37_0", + "channel-4::pyopenssl-18.0.0-py37_0", + "channel-4::jinja2-2.10-py37_0", + "channel-4::cryptography-2.3-py37hb7f436b_0", + "channel-4::setuptools-40.0.0-py37_0", + "channel-4::cffi-1.11.5-py37h9745a5d_0", + "channel-4::six-1.11.0-py37_1", + "channel-4::ruamel_yaml-0.15.46-py37h14c3975_0", + "channel-4::pyyaml-3.13-py37h14c3975_0", + "channel-4::pysocks-1.6.8-py37_0", + "channel-4::pycparser-2.18-py37_1", + "channel-4::pycosat-0.6.3-py37h14c3975_0", + "channel-4::psutil-5.4.6-py37h14c3975_0", + "channel-4::pkginfo-1.4.2-py37_1", + "channel-4::markupsafe-1.0-py37h14c3975_1", + "channel-4::idna-2.7-py37_0", + "channel-4::glob2-0.6-py37_0", + "channel-4::filelock-3.0.4-py37_0", + "channel-4::cryptography-vectors-2.3-py37_0", + "channel-4::chardet-3.0.4-py37_1", + "channel-4::certifi-2018.8.13-py37_0", + "channel-4::beautifulsoup4-4.6.3-py37_0", + "channel-4::asn1crypto-0.24.0-py37_0", + "channel-4::python-3.7.0-hc3d631a_0", + "channel-4::sqlite-3.24.0-h84994c4_0", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::yaml-0.1.7-had09818_2", + "channel-4::xz-5.2.4-h14c3975_4", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::ncurses-6.1-hf484d3e_0", + ) + ) + link_order = add_subdir_to_iter( + ( + "channel-2::openssl-1.0.2l-0", + "channel-2::readline-6.2-2", + "channel-2::sqlite-3.13.0-0", + "channel-2::tk-8.5.18-0", + "channel-2::xz-5.2.3-0", + "channel-2::yaml-0.1.6-0", + "channel-2::python-3.6.2-0", + "channel-2::asn1crypto-0.22.0-py36_0", + "channel-4::beautifulsoup4-4.6.3-py36_0", + "channel-2::certifi-2016.2.28-py36_0", + "channel-4::chardet-3.0.4-py36_1", + "channel-4::filelock-3.0.4-py36_0", + "channel-4::glob2-0.6-py36_0", + "channel-2::idna-2.6-py36_0", + "channel-2::itsdangerous-0.24-py36_0", + "channel-2::markupsafe-1.0-py36_0", + "channel-4::pkginfo-1.4.2-py36_1", + "channel-2::psutil-5.2.2-py36_0", + "channel-2::pycosat-0.6.2-py36_0", + 
"channel-2::pycparser-2.18-py36_0", + "channel-2::pyparsing-2.2.0-py36_0", + "channel-2::pyyaml-3.12-py36_0", + "channel-2::requests-2.14.2-py36_0", + "channel-2::ruamel_yaml-0.11.14-py36_1", + "channel-2::six-1.10.0-py36_0", + "channel-2::cffi-1.10.0-py36_0", + "channel-2::packaging-16.8-py36_0", + "channel-2::setuptools-36.4.0-py36_1", + "channel-2::cryptography-1.8.1-py36_0", + "channel-2::jinja2-2.9.6-py36_0", + "channel-2::pyopenssl-17.0.0-py36_0", + "channel-2::conda-4.3.30-py36h5d9f9f4_0", + "channel-4::conda-build-3.12.1-py36_0", + ) + ) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order finally: @@ -917,27 +1069,31 @@ def test_unfreeze_when_required(tmpdir): # if foobar is installed first it must be downgraded from 2.0. # If foobar is frozen then no solution exists. - specs = [MatchSpec("foobar"), MatchSpec('qux')] + specs = [MatchSpec("foobar"), MatchSpec("qux")] with get_solver_must_unfreeze(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-freeze::libbar-2.0-0', - 'channel-freeze::libfoo-1.0-0', - 'channel-freeze::foobar-1.0-0', - 'channel-freeze::qux-1.0-0', - )) + order = add_subdir_to_iter( + ( + "channel-freeze::libbar-2.0-0", + "channel-freeze::libfoo-1.0-0", + "channel-freeze::foobar-1.0-0", + "channel-freeze::qux-1.0-0", + ) + ) assert convert_to_dist_str(final_state_1) == order - specs = MatchSpec("foobar"), + specs = (MatchSpec("foobar"),) with get_solver_must_unfreeze(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-freeze::libbar-2.0-0', - 'channel-freeze::libfoo-2.0-0', - 'channel-freeze::foobar-2.0-0', - )) + order = add_subdir_to_iter( + ( + "channel-freeze::libbar-2.0-0", + "channel-freeze::libfoo-2.0-0", + "channel-freeze::foobar-2.0-0", + ) + ) assert convert_to_dist_str(final_state_1) == order # When frozen there is no solution - but conda tries really hard to not freeze things that conflict @@ -947,190 +1103,240 @@ def test_unfreeze_when_required(tmpdir): # with pytest.raises(UnsatisfiableError): # solver.solve_final_state(update_modifier=UpdateModifier.FREEZE_INSTALLED) - specs_to_add = MatchSpec("qux"), - with get_solver_must_unfreeze(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: - final_state_2 = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_SPECS) + specs_to_add = (MatchSpec("qux"),) + with get_solver_must_unfreeze( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: + final_state_2 = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_SPECS + ) # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-freeze::libbar-2.0-0', - 'channel-freeze::libfoo-1.0-0', - 'channel-freeze::foobar-1.0-0', - 'channel-freeze::qux-1.0-0', - )) + order = add_subdir_to_iter( + ( + "channel-freeze::libbar-2.0-0", + "channel-freeze::libfoo-1.0-0", + "channel-freeze::foobar-1.0-0", + "channel-freeze::qux-1.0-0", + ) + ) assert convert_to_dist_str(final_state_2) == order def test_auto_update_conda(tmpdir): - specs = MatchSpec("conda=1.3"), + specs = (MatchSpec("conda=1.3"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) 
- order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.3.5-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.3.5-py27_0", + ) + ) assert convert_to_dist_str(final_state_1) == order - with env_vars({"CONDA_AUTO_UPDATE_CONDA": "yes"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs_to_add = MatchSpec("pytz"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with env_vars( + {"CONDA_AUTO_UPDATE_CONDA": "yes"}, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + specs_to_add = (MatchSpec("pytz"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::pytz-2013b-py27_0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.3.5-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::pytz-2013b-py27_0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.3.5-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order saved_sys_prefix = sys.prefix try: sys.prefix = tmpdir.strpath - with env_vars({"CONDA_AUTO_UPDATE_CONDA": "yes"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs_to_add = MatchSpec("pytz"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with env_vars( + {"CONDA_AUTO_UPDATE_CONDA": "yes"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + specs_to_add = (MatchSpec("pytz"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::pytz-2013b-py27_0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.5.2-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + 
"channel-1::pytz-2013b-py27_0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.5.2-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order - with env_vars({"CONDA_AUTO_UPDATE_CONDA": "no"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs_to_add = MatchSpec("pytz"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with env_vars( + {"CONDA_AUTO_UPDATE_CONDA": "no"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + specs_to_add = (MatchSpec("pytz"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::pytz-2013b-py27_0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.3.5-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::pytz-2013b-py27_0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.3.5-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order finally: sys.prefix = saved_sys_prefix def test_explicit_conda_downgrade(tmpdir): - specs = MatchSpec("conda=1.5"), + specs = (MatchSpec("conda=1.5"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.5.2-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.5.2-py27_0", + ) + ) assert convert_to_dist_str(final_state_1) == order - with env_vars({"CONDA_AUTO_UPDATE_CONDA": "yes"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs_to_add = MatchSpec("conda=1.3"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with env_vars( + {"CONDA_AUTO_UPDATE_CONDA": "yes"}, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + specs_to_add = (MatchSpec("conda=1.3"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 
'channel-1::python-2.7.5-0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.3.5-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.3.5-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order saved_sys_prefix = sys.prefix try: sys.prefix = tmpdir.strpath - with env_vars({"CONDA_AUTO_UPDATE_CONDA": "yes"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs_to_add = MatchSpec("conda=1.3"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with env_vars( + {"CONDA_AUTO_UPDATE_CONDA": "yes"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + specs_to_add = (MatchSpec("conda=1.3"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.3.5-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.3.5-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order - with env_vars({"CONDA_AUTO_UPDATE_CONDA": "no"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs_to_add = MatchSpec("conda=1.3"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with env_vars( + {"CONDA_AUTO_UPDATE_CONDA": "no"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + specs_to_add = (MatchSpec("conda=1.3"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::yaml-0.1.4-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::pyyaml-3.10-py27_0', - 'channel-1::conda-1.3.5-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::yaml-0.1.4-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + "channel-1::pyyaml-3.10-py27_0", + "channel-1::conda-1.3.5-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order finally: sys.prefix = saved_sys_prefix @@ -1140,70 +1346,119 @@ def test_aggressive_update_packages(tmpdir): def solve(prev_state, specs_to_add, order): final_state_1, specs = prev_state specs_to_add = 
tuple(MatchSpec(spec_str) for spec_str in specs_to_add) - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() print(convert_to_dist_str(final_state_2)) assert convert_to_dist_str(final_state_2) == order concat_specs = specs + specs_to_add return final_state_2, concat_specs + # test with "libpng", "cmake": both have multiple versions and no requirements in "channel-1" empty_state = ((), ()) - with env_vars({"CONDA_AGGRESSIVE_UPDATE_PACKAGES": ""}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + {"CONDA_AGGRESSIVE_UPDATE_PACKAGES": ""}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): base_state = solve( - empty_state, ["libpng=1.2"], - add_subdir_to_iter(( - 'channel-1::libpng-1.2.50-0', - ))) + empty_state, + ["libpng=1.2"], + add_subdir_to_iter(("channel-1::libpng-1.2.50-0",)), + ) # # ~~has "libpng" restricted to "=1.2" by history_specs~~ NOPE! # In conda 4.6 making aggressive_update *more* aggressive, making it override history specs. state_1 = base_state - with env_vars({"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + {"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): solve( - state_1, ["cmake=2.8.9"], - add_subdir_to_iter(( - 'channel-1::cmake-2.8.9-0', - 'channel-1::libpng-1.5.13-1', - ))) - with env_vars({"CONDA_AGGRESSIVE_UPDATE_PACKAGES": ""}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + state_1, + ["cmake=2.8.9"], + add_subdir_to_iter( + ( + "channel-1::cmake-2.8.9-0", + "channel-1::libpng-1.5.13-1", + ) + ), + ) + with env_vars( + {"CONDA_AGGRESSIVE_UPDATE_PACKAGES": ""}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): state_1_2 = solve( - state_1, ["cmake=2.8.9"], - add_subdir_to_iter(( - 'channel-1::cmake-2.8.9-0', - 'channel-1::libpng-1.2.50-0', - ))) - with env_vars({"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + state_1, + ["cmake=2.8.9"], + add_subdir_to_iter( + ( + "channel-1::cmake-2.8.9-0", + "channel-1::libpng-1.2.50-0", + ) + ), + ) + with env_vars( + {"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): solve( - state_1_2, ["cmake>2.8.9"], - add_subdir_to_iter(( - 'channel-1::cmake-2.8.10.2-0', - 'channel-1::libpng-1.5.13-1', - ))) + state_1_2, + ["cmake>2.8.9"], + add_subdir_to_iter( + ( + "channel-1::cmake-2.8.10.2-0", + "channel-1::libpng-1.5.13-1", + ) + ), + ) # use new history_specs to remove "libpng" version restriction state_2 = (base_state[0], (MatchSpec("libpng"),)) - with env_vars({"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + {"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): solve( - state_2, ["cmake=2.8.9"], - add_subdir_to_iter(( - 'channel-1::cmake-2.8.9-0', - 'channel-1::libpng-1.5.13-1', - ))) - with env_vars({"CONDA_AGGRESSIVE_UPDATE_PACKAGES": ""}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + state_2, + ["cmake=2.8.9"], + add_subdir_to_iter( + ( + "channel-1::cmake-2.8.9-0", + "channel-1::libpng-1.5.13-1", + ) + ), + ) + with env_vars( + {"CONDA_AGGRESSIVE_UPDATE_PACKAGES": ""}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): state_2_2 = solve( - state_2, ["cmake=2.8.9"], - 
add_subdir_to_iter(( - 'channel-1::cmake-2.8.9-0', - 'channel-1::libpng-1.2.50-0', - ))) - with env_vars({"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, stack_callback=conda_tests_ctxt_mgmt_def_pol): + state_2, + ["cmake=2.8.9"], + add_subdir_to_iter( + ( + "channel-1::cmake-2.8.9-0", + "channel-1::libpng-1.2.50-0", + ) + ), + ) + with env_vars( + {"CONDA_AGGRESSIVE_UPDATE_PACKAGES": "libpng"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): solve( - state_2_2, ["cmake>2.8.9"], - add_subdir_to_iter(( - 'channel-1::cmake-2.8.10.2-0', - 'channel-1::libpng-1.5.13-1', - ))) + state_2_2, + ["cmake>2.8.9"], + add_subdir_to_iter( + ( + "channel-1::cmake-2.8.10.2-0", + "channel-1::libpng-1.5.13-1", + ) + ), + ) + def test_python2_update(tmpdir): # Here we're actually testing that a user-request will uninstall incompatible packages @@ -1212,172 +1467,200 @@ def test_python2_update(tmpdir): with get_solver_4(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order1 = add_subdir_to_iter(( - 'channel-4::ca-certificates-2018.03.07-0', - 'channel-4::conda-env-2.6.0-1', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-4::libffi-3.2.1-hd88cf55_4', - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::yaml-0.1.7-had09818_2', - 'channel-4::zlib-1.2.11-ha838bed_2', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.24.0-h84994c4_0', - 'channel-4::python-2.7.15-h1571d57_0', - 'channel-4::asn1crypto-0.24.0-py27_0', - 'channel-4::certifi-2018.8.13-py27_0', - 'channel-4::chardet-3.0.4-py27_1', - 'channel-4::cryptography-vectors-2.3-py27_0', - 'channel-4::enum34-1.1.6-py27_1', - 'channel-4::futures-3.2.0-py27_0', - 'channel-4::idna-2.7-py27_0', - 'channel-4::ipaddress-1.0.22-py27_0', - 'channel-4::pycosat-0.6.3-py27h14c3975_0', - 'channel-4::pycparser-2.18-py27_1', - 'channel-4::pysocks-1.6.8-py27_0', - 'channel-4::ruamel_yaml-0.15.46-py27h14c3975_0', - 'channel-4::six-1.11.0-py27_1', - 'channel-4::cffi-1.11.5-py27h9745a5d_0', - 'channel-4::cryptography-2.3-py27hb7f436b_0', - 'channel-4::pyopenssl-18.0.0-py27_0', - 'channel-4::urllib3-1.23-py27_0', - 'channel-4::requests-2.19.1-py27_0', - 'channel-4::conda-4.5.10-py27_0', - )) + order1 = add_subdir_to_iter( + ( + "channel-4::ca-certificates-2018.03.07-0", + "channel-4::conda-env-2.6.0-1", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-4::libffi-3.2.1-hd88cf55_4", + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::yaml-0.1.7-had09818_2", + "channel-4::zlib-1.2.11-ha838bed_2", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.24.0-h84994c4_0", + "channel-4::python-2.7.15-h1571d57_0", + "channel-4::asn1crypto-0.24.0-py27_0", + "channel-4::certifi-2018.8.13-py27_0", + "channel-4::chardet-3.0.4-py27_1", + "channel-4::cryptography-vectors-2.3-py27_0", + "channel-4::enum34-1.1.6-py27_1", + "channel-4::futures-3.2.0-py27_0", + "channel-4::idna-2.7-py27_0", + "channel-4::ipaddress-1.0.22-py27_0", + "channel-4::pycosat-0.6.3-py27h14c3975_0", + "channel-4::pycparser-2.18-py27_1", + "channel-4::pysocks-1.6.8-py27_0", + "channel-4::ruamel_yaml-0.15.46-py27h14c3975_0", + "channel-4::six-1.11.0-py27_1", + 
"channel-4::cffi-1.11.5-py27h9745a5d_0", + "channel-4::cryptography-2.3-py27hb7f436b_0", + "channel-4::pyopenssl-18.0.0-py27_0", + "channel-4::urllib3-1.23-py27_0", + "channel-4::requests-2.19.1-py27_0", + "channel-4::conda-4.5.10-py27_0", + ) + ) assert convert_to_dist_str(final_state_1) == order1 - specs_to_add = MatchSpec("python=3"), - with get_solver_4(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("python=3"),) + with get_solver_4( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-4::ca-certificates-2018.03.07-0', - 'channel-4::conda-env-2.6.0-1', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-4::libffi-3.2.1-hd88cf55_4', - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::xz-5.2.4-h14c3975_4', - 'channel-4::yaml-0.1.7-had09818_2', - 'channel-4::zlib-1.2.11-ha838bed_2', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.24.0-h84994c4_0', - 'channel-4::python-3.7.0-hc3d631a_0', - 'channel-4::asn1crypto-0.24.0-py37_0', - 'channel-4::certifi-2018.8.13-py37_0', - 'channel-4::chardet-3.0.4-py37_1', - 'channel-4::idna-2.7-py37_0', - 'channel-4::pycosat-0.6.3-py37h14c3975_0', - 'channel-4::pycparser-2.18-py37_1', - 'channel-4::pysocks-1.6.8-py37_0', - 'channel-4::ruamel_yaml-0.15.46-py37h14c3975_0', - 'channel-4::six-1.11.0-py37_1', - 'channel-4::cffi-1.11.5-py37h9745a5d_0', - 'channel-4::cryptography-2.2.2-py37h14c3975_0', - 'channel-4::pyopenssl-18.0.0-py37_0', - 'channel-4::urllib3-1.23-py37_0', - 'channel-4::requests-2.19.1-py37_0', - 'channel-4::conda-4.5.10-py37_0', - )) + order = add_subdir_to_iter( + ( + "channel-4::ca-certificates-2018.03.07-0", + "channel-4::conda-env-2.6.0-1", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-4::libffi-3.2.1-hd88cf55_4", + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::xz-5.2.4-h14c3975_4", + "channel-4::yaml-0.1.7-had09818_2", + "channel-4::zlib-1.2.11-ha838bed_2", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.24.0-h84994c4_0", + "channel-4::python-3.7.0-hc3d631a_0", + "channel-4::asn1crypto-0.24.0-py37_0", + "channel-4::certifi-2018.8.13-py37_0", + "channel-4::chardet-3.0.4-py37_1", + "channel-4::idna-2.7-py37_0", + "channel-4::pycosat-0.6.3-py37h14c3975_0", + "channel-4::pycparser-2.18-py37_1", + "channel-4::pysocks-1.6.8-py37_0", + "channel-4::ruamel_yaml-0.15.46-py37h14c3975_0", + "channel-4::six-1.11.0-py37_1", + "channel-4::cffi-1.11.5-py37h9745a5d_0", + "channel-4::cryptography-2.2.2-py37h14c3975_0", + "channel-4::pyopenssl-18.0.0-py37_0", + "channel-4::urllib3-1.23-py37_0", + "channel-4::requests-2.19.1-py37_0", + "channel-4::conda-4.5.10-py37_0", + ) + ) assert convert_to_dist_str(final_state_2) == order def test_update_deps_1(tmpdir): - specs = MatchSpec("python=2"), + specs = (MatchSpec("python=2"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() # print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 
'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + ) + ) assert convert_to_dist_str(final_state_1) == order specs2 = MatchSpec("numpy=1.7.0"), MatchSpec("python=2.7.3") - with get_solver(tmpdir, specs2, prefix_records=final_state_1, history_specs=specs) as solver: + with get_solver( + tmpdir, specs2, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.3-7', - 'channel-1::nose-1.3.0-py27_0', - 'channel-1::numpy-1.7.0-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.3-7", + "channel-1::nose-1.3.0-py27_0", + "channel-1::numpy-1.7.0-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order - specs_to_add = MatchSpec("iopro"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_2, history_specs=specs2) as solver: + specs_to_add = (MatchSpec("iopro"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_2, history_specs=specs2 + ) as solver: final_state_3a = solver.solve_final_state() print(convert_to_dist_str(final_state_3a)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::unixodbc-2.3.1-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.3-7', - 'channel-1::nose-1.3.0-py27_0', - 'channel-1::numpy-1.7.0-py27_0', - 'channel-1::iopro-1.5.0-np17py27_p0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::unixodbc-2.3.1-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.3-7", + "channel-1::nose-1.3.0-py27_0", + "channel-1::numpy-1.7.0-py27_0", + "channel-1::iopro-1.5.0-np17py27_p0", + ) + ) assert convert_to_dist_str(final_state_3a) == order - specs_to_add = MatchSpec("iopro"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_2, history_specs=specs2) as solver: - final_state_3 = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_DEPS) + specs_to_add = (MatchSpec("iopro"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_2, history_specs=specs2 + ) as solver: + final_state_3 = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_DEPS + ) pprint(convert_to_dist_str(final_state_3)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::unixodbc-2.3.1-0', - 'channel-1::zlib-1.2.7-0', - 
'channel-1::python-2.7.5-0', # with update_deps, numpy should switch from 1.7.0 to 1.7.1 - 'channel-1::nose-1.3.0-py27_0', - 'channel-1::numpy-1.7.1-py27_0', # with update_deps, numpy should switch from 1.7.0 to 1.7.1 - 'channel-1::iopro-1.5.0-np17py27_p0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::unixodbc-2.3.1-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", # with update_deps, numpy should switch from 1.7.0 to 1.7.1 + "channel-1::nose-1.3.0-py27_0", + "channel-1::numpy-1.7.1-py27_0", # with update_deps, numpy should switch from 1.7.0 to 1.7.1 + "channel-1::iopro-1.5.0-np17py27_p0", + ) + ) assert convert_to_dist_str(final_state_3) == order - specs_to_add = MatchSpec("iopro"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_2, history_specs=specs2) as solver: - final_state_3 = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_DEPS, - deps_modifier=DepsModifier.ONLY_DEPS) + specs_to_add = (MatchSpec("iopro"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_2, history_specs=specs2 + ) as solver: + final_state_3 = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_DEPS, + deps_modifier=DepsModifier.ONLY_DEPS, + ) pprint(convert_to_dist_str(final_state_3)) - order = add_subdir_to_iter(( - 'channel-1::unixodbc-2.3.1-0', - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', # with update_deps, numpy should switch from 1.7.0 to 1.7.1 - 'channel-1::nose-1.3.0-py27_0', - 'channel-1::numpy-1.7.1-py27_0', # with update_deps, numpy should switch from 1.7.0 to 1.7.1 - # 'channel-1::iopro-1.5.0-np17py27_p0', - )) + order = add_subdir_to_iter( + ( + "channel-1::unixodbc-2.3.1-0", + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", # with update_deps, numpy should switch from 1.7.0 to 1.7.1 + "channel-1::nose-1.3.0-py27_0", + "channel-1::numpy-1.7.1-py27_0", # with update_deps, numpy should switch from 1.7.0 to 1.7.1 + # 'channel-1::iopro-1.5.0-np17py27_p0', + ) + ) assert convert_to_dist_str(final_state_3) == order @@ -1386,301 +1669,399 @@ def test_update_deps_2(tmpdir): with get_solver_aggregate_2(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order1 = add_subdir_to_iter(( - 'channel-4::ca-certificates-2018.03.07-0', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-4::libffi-3.2.1-hd88cf55_4', - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::xz-5.2.4-h14c3975_4', - 'channel-4::zlib-1.2.11-ha838bed_2', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.24.0-h84994c4_0', - 'channel-4::python-3.6.6-hc3d631a_0', - 'channel-4::certifi-2018.8.13-py36_0', - 'channel-4::click-6.7-py36_0', - 'channel-4::itsdangerous-0.24-py36_1', - 'channel-4::markupsafe-1.0-py36h14c3975_1', - 'channel-4::werkzeug-0.14.1-py36_0', - 'channel-4::setuptools-40.0.0-py36_0', - 'channel-2::jinja2-2.8-py36_1', - 
'channel-2::flask-0.12-py36_0', - )) + order1 = add_subdir_to_iter( + ( + "channel-4::ca-certificates-2018.03.07-0", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-4::libffi-3.2.1-hd88cf55_4", + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::xz-5.2.4-h14c3975_4", + "channel-4::zlib-1.2.11-ha838bed_2", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.24.0-h84994c4_0", + "channel-4::python-3.6.6-hc3d631a_0", + "channel-4::certifi-2018.8.13-py36_0", + "channel-4::click-6.7-py36_0", + "channel-4::itsdangerous-0.24-py36_1", + "channel-4::markupsafe-1.0-py36h14c3975_1", + "channel-4::werkzeug-0.14.1-py36_0", + "channel-4::setuptools-40.0.0-py36_0", + "channel-2::jinja2-2.8-py36_1", + "channel-2::flask-0.12-py36_0", + ) + ) assert convert_to_dist_str(final_state_1) == order1 # The "conda update flask" case is held back by the jinja2==2.8 user-requested spec. - specs_to_add = MatchSpec("flask"), - with get_solver_aggregate_2(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("flask"),) + with get_solver_aggregate_2( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) - unlink_order = add_subdir_to_iter(( - 'channel-2::flask-0.12-py36_0', - )) - link_order = add_subdir_to_iter(( - 'channel-4::flask-0.12.2-py36hb24657c_0', - )) + unlink_order = add_subdir_to_iter(("channel-2::flask-0.12-py36_0",)) + link_order = add_subdir_to_iter(("channel-4::flask-0.12.2-py36hb24657c_0",)) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order # Now solve with UPDATE_DEPS - specs_to_add = MatchSpec("flask"), - with get_solver_aggregate_2(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: - unlink_precs, link_precs = solver.solve_for_diff(update_modifier=UpdateModifier.UPDATE_DEPS) + specs_to_add = (MatchSpec("flask"),) + with get_solver_aggregate_2( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: + unlink_precs, link_precs = solver.solve_for_diff( + update_modifier=UpdateModifier.UPDATE_DEPS + ) pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) - unlink_order = add_subdir_to_iter(( - 'channel-2::flask-0.12-py36_0', - 'channel-2::jinja2-2.8-py36_1', - )) - link_order = add_subdir_to_iter(( - 'channel-4::jinja2-2.10-py36_0', - 'channel-4::flask-1.0.2-py36_1', - )) + unlink_order = add_subdir_to_iter( + ( + "channel-2::flask-0.12-py36_0", + "channel-2::jinja2-2.8-py36_1", + ) + ) + link_order = add_subdir_to_iter( + ( + "channel-4::jinja2-2.10-py36_0", + "channel-4::flask-1.0.2-py36_1", + ) + ) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order def test_fast_update_with_update_modifier_not_set(tmpdir): - specs = MatchSpec("python=2"), MatchSpec("openssl==1.0.2l"), MatchSpec("sqlite=3.21"), + specs = ( + MatchSpec("python=2"), + MatchSpec("openssl==1.0.2l"), + MatchSpec("sqlite=3.21"), + ) with get_solver_4(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order1 = add_subdir_to_iter(( - 
'channel-4::ca-certificates-2018.03.07-0', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-4::libffi-3.2.1-hd88cf55_4', - 'channel-4::ncurses-6.0-h9df7e31_2', - 'channel-4::openssl-1.0.2l-h077ae2c_5', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::zlib-1.2.11-ha838bed_2', - 'channel-4::libedit-3.1-heed3624_0', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.21.0-h1bed415_2', - 'channel-4::python-2.7.14-h89e7a4a_22', - )) + order1 = add_subdir_to_iter( + ( + "channel-4::ca-certificates-2018.03.07-0", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-4::libffi-3.2.1-hd88cf55_4", + "channel-4::ncurses-6.0-h9df7e31_2", + "channel-4::openssl-1.0.2l-h077ae2c_5", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::zlib-1.2.11-ha838bed_2", + "channel-4::libedit-3.1-heed3624_0", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.21.0-h1bed415_2", + "channel-4::python-2.7.14-h89e7a4a_22", + ) + ) assert convert_to_dist_str(final_state_1) == order1 - specs_to_add = MatchSpec("python"), - with get_solver_4(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("python"),) + with get_solver_4( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) - unlink_order = add_subdir_to_iter(( - 'channel-4::python-2.7.14-h89e7a4a_22', - 'channel-4::libedit-3.1-heed3624_0', - 'channel-4::openssl-1.0.2l-h077ae2c_5', - 'channel-4::ncurses-6.0-h9df7e31_2' - )) - link_order = add_subdir_to_iter(( - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::xz-5.2.4-h14c3975_4', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::python-3.6.4-hc3d631a_1', # python is upgraded - )) + unlink_order = add_subdir_to_iter( + ( + "channel-4::python-2.7.14-h89e7a4a_22", + "channel-4::libedit-3.1-heed3624_0", + "channel-4::openssl-1.0.2l-h077ae2c_5", + "channel-4::ncurses-6.0-h9df7e31_2", + ) + ) + link_order = add_subdir_to_iter( + ( + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::xz-5.2.4-h14c3975_4", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::python-3.6.4-hc3d631a_1", # python is upgraded + ) + ) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order - specs_to_add = MatchSpec("sqlite"), - with get_solver_4(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("sqlite"),) + with get_solver_4( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) - unlink_order = add_subdir_to_iter(( - 'channel-4::python-2.7.14-h89e7a4a_22', - 'channel-4::sqlite-3.21.0-h1bed415_2', - 'channel-4::libedit-3.1-heed3624_0', - 'channel-4::openssl-1.0.2l-h077ae2c_5', - 'channel-4::ncurses-6.0-h9df7e31_2', - )) - link_order = add_subdir_to_iter(( - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::sqlite-3.24.0-h84994c4_0', # sqlite is upgraded - 'channel-4::python-2.7.15-h1571d57_0', # python is not upgraded - )) + 
unlink_order = add_subdir_to_iter( + ( + "channel-4::python-2.7.14-h89e7a4a_22", + "channel-4::sqlite-3.21.0-h1bed415_2", + "channel-4::libedit-3.1-heed3624_0", + "channel-4::openssl-1.0.2l-h077ae2c_5", + "channel-4::ncurses-6.0-h9df7e31_2", + ) + ) + link_order = add_subdir_to_iter( + ( + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::sqlite-3.24.0-h84994c4_0", # sqlite is upgraded + "channel-4::python-2.7.15-h1571d57_0", # python is not upgraded + ) + ) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order - specs_to_add = MatchSpec("sqlite"), MatchSpec("python"), - with get_solver_4(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: - final_state_2 = solver.solve_final_state(update_modifier=UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE) + specs_to_add = ( + MatchSpec("sqlite"), + MatchSpec("python"), + ) + with get_solver_4( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: + final_state_2 = solver.solve_final_state( + update_modifier=UpdateModifier.SPECS_SATISFIED_SKIP_SOLVE + ) pprint(convert_to_dist_str(final_state_2)) assert convert_to_dist_str(final_state_2) == order1 @pytest.mark.integration def test_pinned_1(tmpdir): - specs = MatchSpec("numpy"), + specs = (MatchSpec("numpy"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-3.3.2-0', - 'channel-1::numpy-1.7.1-py33_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-3.3.2-0", + "channel-1::numpy-1.7.1-py33_0", + ) + ) assert convert_to_dist_str(final_state_1) == order - with env_var("CONDA_PINNED_PACKAGES", "python=2.6&iopro<=1.4.2", stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs = MatchSpec("system=5.8=0"), + with env_var( + "CONDA_PINNED_PACKAGES", + "python=2.6&iopro<=1.4.2", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + specs = (MatchSpec("system=5.8=0"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::system-5.8-0', - )) + order = add_subdir_to_iter(("channel-1::system-5.8-0",)) assert convert_to_dist_str(final_state_1) == order # ignore_pinned=True - specs_to_add = MatchSpec("python"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_add = (MatchSpec("python"),) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: final_state_2 = solver.solve_final_state(ignore_pinned=True) # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-0', - 
'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-3.3.2-0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-0", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-3.3.2-0", + ) + ) assert convert_to_dist_str(final_state_2) == order # ignore_pinned=False - specs_to_add = MatchSpec("python"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_add = (MatchSpec("python"),) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: final_state_2 = solver.solve_final_state(ignore_pinned=False) # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-0', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.6.8-6', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-0", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.6.8-6", + ) + ) assert convert_to_dist_str(final_state_2) == order # incompatible CLI and configured specs - specs_to_add = MatchSpec("scikit-learn==0.13"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_add = (MatchSpec("scikit-learn==0.13"),) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: with pytest.raises(SpecsConfigurationConflictError) as exc: solver.solve_final_state(ignore_pinned=False) kwargs = exc.value._kwargs assert kwargs["requested_specs"] == ["scikit-learn==0.13"] assert kwargs["pinned_specs"] == ["python=2.6"] - specs_to_add = MatchSpec("numba"), - history_specs = MatchSpec("python"), MatchSpec("system=5.8=0"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_2, - history_specs=history_specs) as solver: + specs_to_add = (MatchSpec("numba"),) + history_specs = ( + MatchSpec("python"), + MatchSpec("system=5.8=0"), + ) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_2, + history_specs=history_specs, + ) as solver: final_state_3 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_3)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-0', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.6.8-6', - 'channel-1::argparse-1.2.1-py26_0', - 'channel-1::llvmpy-0.11.2-py26_0', - 'channel-1::numpy-1.7.1-py26_0', - 'channel-1::numba-0.8.1-np17py26_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-0", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.6.8-6", + "channel-1::argparse-1.2.1-py26_0", + "channel-1::llvmpy-0.11.2-py26_0", + "channel-1::numpy-1.7.1-py26_0", + "channel-1::numba-0.8.1-np17py26_0", + ) + ) assert 
convert_to_dist_str(final_state_3) == order - specs_to_add = MatchSpec("python"), - history_specs = MatchSpec("python"), MatchSpec("system=5.8=0"), MatchSpec("numba"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_3, - history_specs=history_specs) as solver: - final_state_4 = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_DEPS) + specs_to_add = (MatchSpec("python"),) + history_specs = ( + MatchSpec("python"), + MatchSpec("system=5.8=0"), + MatchSpec("numba"), + ) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_3, + history_specs=history_specs, + ) as solver: + final_state_4 = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_DEPS + ) # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_4)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.6.8-6', - 'channel-1::argparse-1.2.1-py26_0', - 'channel-1::llvmpy-0.11.2-py26_0', - 'channel-1::numpy-1.7.1-py26_0', - 'channel-1::numba-0.8.1-np17py26_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.6.8-6", + "channel-1::argparse-1.2.1-py26_0", + "channel-1::llvmpy-0.11.2-py26_0", + "channel-1::numpy-1.7.1-py26_0", + "channel-1::numba-0.8.1-np17py26_0", + ) + ) assert convert_to_dist_str(final_state_4) == order - specs_to_add = MatchSpec("python"), - history_specs = MatchSpec("python"), MatchSpec("system=5.8=0"), MatchSpec("numba"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_4, - history_specs=history_specs) as solver: - final_state_5 = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_ALL) + specs_to_add = (MatchSpec("python"),) + history_specs = ( + MatchSpec("python"), + MatchSpec("system=5.8=0"), + MatchSpec("numba"), + ) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_4, + history_specs=history_specs, + ) as solver: + final_state_5 = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_ALL + ) # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_5)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-2.6.8-6', - 'channel-1::argparse-1.2.1-py26_0', - 'channel-1::llvmpy-0.11.2-py26_0', - 'channel-1::numpy-1.7.1-py26_0', - 'channel-1::numba-0.8.1-np17py26_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-2.6.8-6", + "channel-1::argparse-1.2.1-py26_0", + "channel-1::llvmpy-0.11.2-py26_0", + "channel-1::numpy-1.7.1-py26_0", + "channel-1::numba-0.8.1-np17py26_0", + ) + ) assert convert_to_dist_str(final_state_5) == order # now update without pinning - specs_to_add = MatchSpec("python"), - history_specs = MatchSpec("python"), 
MatchSpec("system=5.8=0"), MatchSpec("numba"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_4, - history_specs=history_specs) as solver: - final_state_5 = solver.solve_final_state(update_modifier=UpdateModifier.UPDATE_ALL) + specs_to_add = (MatchSpec("python"),) + history_specs = ( + MatchSpec("python"), + MatchSpec("system=5.8=0"), + MatchSpec("numba"), + ) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_4, + history_specs=history_specs, + ) as solver: + final_state_5 = solver.solve_final_state( + update_modifier=UpdateModifier.UPDATE_ALL + ) # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_5)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::llvm-3.2-0', - 'channel-1::python-3.3.2-0', - 'channel-1::llvmpy-0.11.2-py33_0', - 'channel-1::numpy-1.7.1-py33_0', - 'channel-1::numba-0.8.1-np17py33_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::llvm-3.2-0", + "channel-1::python-3.3.2-0", + "channel-1::llvmpy-0.11.2-py33_0", + "channel-1::numpy-1.7.1-py33_0", + "channel-1::numba-0.8.1-np17py33_0", + ) + ) assert convert_to_dist_str(final_state_5) == order @@ -1688,84 +2069,100 @@ def test_no_update_deps_1(tmpdir): # i.e. FREEZE_DEPS # NOTE: So far, NOT actually testing the FREEZE_DEPS flag. I'm unable to contrive a # situation where it's actually needed. - specs = MatchSpec("python=2"), + specs = (MatchSpec("python=2"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + ) + ) assert convert_to_dist_str(final_state_1) == order - specs_to_add = MatchSpec("zope.interface"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("zope.interface"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - 'channel-1::nose-1.3.0-py27_0', - 'channel-1::zope.interface-4.0.5-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + 
"channel-1::nose-1.3.0-py27_0", + "channel-1::zope.interface-4.0.5-py27_0", + ) + ) assert convert_to_dist_str(final_state_2) == order - specs_to_add = MatchSpec("zope.interface>4.1"), - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("zope.interface>4.1"),) + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: with pytest.raises(UnsatisfiableError): final_state_2 = solver.solve_final_state() # allow python to float specs_to_add = MatchSpec("zope.interface>4.1"), MatchSpec("python") - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() # PrefixDag(final_state_2, specs).open_url() print(convert_to_dist_str(final_state_2)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-3.3.2-0', - 'channel-1::nose-1.3.0-py33_0', - 'channel-1::zope.interface-4.1.1.1-py33_0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-3.3.2-0", + "channel-1::nose-1.3.0-py33_0", + "channel-1::zope.interface-4.1.1.1-py33_0", + ) + ) assert convert_to_dist_str(final_state_2) == order def test_force_reinstall_1(tmpdir): - specs = MatchSpec("python=2"), + specs = (MatchSpec("python=2"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + ) + ) assert convert_to_dist_str(final_state_1) == order specs_to_add = specs - with get_solver(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + with get_solver( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: unlink_dists, link_dists = solver.solve_for_diff() assert not unlink_dists assert not link_dists @@ -1780,60 +2177,73 @@ def test_force_reinstall_1(tmpdir): def test_force_reinstall_2(tmpdir): - specs = MatchSpec("python=2"), + specs = (MatchSpec("python=2"),) with get_solver(tmpdir, specs) as solver: unlink_dists, link_dists = solver.solve_for_diff(force_reinstall=True) assert not unlink_dists # PrefixDag(final_state_1, specs).open_url() print(convert_to_dist_str(link_dists)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.7.5-0', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + 
"channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.7.5-0", + ) + ) assert convert_to_dist_str(link_dists) == order def test_timestamps_1(tmpdir): - specs = MatchSpec("python=3.6.2"), + specs = (MatchSpec("python=3.6.2"),) with get_solver_4(tmpdir, specs) as solver: unlink_dists, link_dists = solver.solve_for_diff(force_reinstall=True) assert not unlink_dists pprint(convert_to_dist_str(link_dists)) - order = add_subdir_to_iter(( - 'channel-4::ca-certificates-2018.03.07-0', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-4::libffi-3.2.1-hd88cf55_4', - 'channel-4::ncurses-6.0-h9df7e31_2', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::xz-5.2.4-h14c3975_4', - 'channel-4::zlib-1.2.11-ha838bed_2', - 'channel-4::libedit-3.1-heed3624_0', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.23.1-he433501_0', - 'channel-4::python-3.6.2-hca45abc_19', # this package has a later timestamp but lower hash value - # than the alternate 'channel-4::python-3.6.2-hda45abc_19' - )) + order = add_subdir_to_iter( + ( + "channel-4::ca-certificates-2018.03.07-0", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-4::libffi-3.2.1-hd88cf55_4", + "channel-4::ncurses-6.0-h9df7e31_2", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::xz-5.2.4-h14c3975_4", + "channel-4::zlib-1.2.11-ha838bed_2", + "channel-4::libedit-3.1-heed3624_0", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.23.1-he433501_0", + "channel-4::python-3.6.2-hca45abc_19", # this package has a later timestamp but lower hash value + # than the alternate 'channel-4::python-3.6.2-hda45abc_19' + ) + ) assert convert_to_dist_str(link_dists) == order + def test_channel_priority_churn_minimized(tmpdir): - specs = MatchSpec("conda-build"), MatchSpec("itsdangerous"), + specs = ( + MatchSpec("conda-build"), + MatchSpec("itsdangerous"), + ) with get_solver_aggregate_2(tmpdir, specs) as solver: final_state = solver.solve_final_state() pprint(convert_to_dist_str(final_state)) - with get_solver_aggregate_2(tmpdir, [MatchSpec('itsdangerous')], - prefix_records=final_state, history_specs=specs) as solver: + with get_solver_aggregate_2( + tmpdir, + [MatchSpec("itsdangerous")], + prefix_records=final_state, + history_specs=specs, + ) as solver: solver.channels.reverse() unlink_dists, link_dists = solver.solve_for_diff( - update_modifier=UpdateModifier.FREEZE_INSTALLED) + update_modifier=UpdateModifier.FREEZE_INSTALLED + ) pprint(convert_to_dist_str(unlink_dists)) pprint(convert_to_dist_str(link_dists)) assert len(unlink_dists) == 1 @@ -1843,136 +2253,176 @@ def test_channel_priority_churn_minimized(tmpdir): def test_remove_with_constrained_dependencies(tmpdir): # This is a regression test for #6904. Up through conda 4.4.10, removal isn't working # correctly with constrained dependencies. 
- specs = MatchSpec("conda"), MatchSpec("conda-build"), + specs = ( + MatchSpec("conda"), + MatchSpec("conda-build"), + ) with get_solver_4(tmpdir, specs) as solver: unlink_dists_1, link_dists_1 = solver.solve_for_diff() assert not unlink_dists_1 pprint(convert_to_dist_str(link_dists_1)) assert not unlink_dists_1 - order = add_subdir_to_iter(( - 'channel-4::ca-certificates-2018.03.07-0', - 'channel-4::conda-env-2.6.0-1', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-4::libffi-3.2.1-hd88cf55_4', - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::patchelf-0.9-hf484d3e_2', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::xz-5.2.4-h14c3975_4', - 'channel-4::yaml-0.1.7-had09818_2', - 'channel-4::zlib-1.2.11-ha838bed_2', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.24.0-h84994c4_0', - 'channel-4::python-3.7.0-hc3d631a_0', - 'channel-4::asn1crypto-0.24.0-py37_0', - 'channel-4::beautifulsoup4-4.6.3-py37_0', - 'channel-4::certifi-2018.8.13-py37_0', - 'channel-4::chardet-3.0.4-py37_1', - 'channel-4::cryptography-vectors-2.3-py37_0', - 'channel-4::filelock-3.0.4-py37_0', - 'channel-4::glob2-0.6-py37_0', - 'channel-4::idna-2.7-py37_0', - 'channel-4::markupsafe-1.0-py37h14c3975_1', - 'channel-4::pkginfo-1.4.2-py37_1', - 'channel-4::psutil-5.4.6-py37h14c3975_0', - 'channel-4::pycosat-0.6.3-py37h14c3975_0', - 'channel-4::pycparser-2.18-py37_1', - 'channel-4::pysocks-1.6.8-py37_0', - 'channel-4::pyyaml-3.13-py37h14c3975_0', - 'channel-4::ruamel_yaml-0.15.46-py37h14c3975_0', - 'channel-4::six-1.11.0-py37_1', - 'channel-4::cffi-1.11.5-py37h9745a5d_0', - 'channel-4::setuptools-40.0.0-py37_0', - 'channel-4::cryptography-2.3-py37hb7f436b_0', - 'channel-4::jinja2-2.10-py37_0', - 'channel-4::pyopenssl-18.0.0-py37_0', - 'channel-4::urllib3-1.23-py37_0', - 'channel-4::requests-2.19.1-py37_0', - 'channel-4::conda-4.5.10-py37_0', - 'channel-4::conda-build-3.12.1-py37_0', - )) + order = add_subdir_to_iter( + ( + "channel-4::ca-certificates-2018.03.07-0", + "channel-4::conda-env-2.6.0-1", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-4::libffi-3.2.1-hd88cf55_4", + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::patchelf-0.9-hf484d3e_2", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::xz-5.2.4-h14c3975_4", + "channel-4::yaml-0.1.7-had09818_2", + "channel-4::zlib-1.2.11-ha838bed_2", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.24.0-h84994c4_0", + "channel-4::python-3.7.0-hc3d631a_0", + "channel-4::asn1crypto-0.24.0-py37_0", + "channel-4::beautifulsoup4-4.6.3-py37_0", + "channel-4::certifi-2018.8.13-py37_0", + "channel-4::chardet-3.0.4-py37_1", + "channel-4::cryptography-vectors-2.3-py37_0", + "channel-4::filelock-3.0.4-py37_0", + "channel-4::glob2-0.6-py37_0", + "channel-4::idna-2.7-py37_0", + "channel-4::markupsafe-1.0-py37h14c3975_1", + "channel-4::pkginfo-1.4.2-py37_1", + "channel-4::psutil-5.4.6-py37h14c3975_0", + "channel-4::pycosat-0.6.3-py37h14c3975_0", + "channel-4::pycparser-2.18-py37_1", + "channel-4::pysocks-1.6.8-py37_0", + "channel-4::pyyaml-3.13-py37h14c3975_0", + "channel-4::ruamel_yaml-0.15.46-py37h14c3975_0", + "channel-4::six-1.11.0-py37_1", + "channel-4::cffi-1.11.5-py37h9745a5d_0", + "channel-4::setuptools-40.0.0-py37_0", + "channel-4::cryptography-2.3-py37hb7f436b_0", + 
"channel-4::jinja2-2.10-py37_0", + "channel-4::pyopenssl-18.0.0-py37_0", + "channel-4::urllib3-1.23-py37_0", + "channel-4::requests-2.19.1-py37_0", + "channel-4::conda-4.5.10-py37_0", + "channel-4::conda-build-3.12.1-py37_0", + ) + ) assert convert_to_dist_str(link_dists_1) == order - specs_to_remove = MatchSpec("pycosat"), - with get_solver_4(tmpdir, specs_to_remove=specs_to_remove, prefix_records=link_dists_1, history_specs=specs) as solver: + specs_to_remove = (MatchSpec("pycosat"),) + with get_solver_4( + tmpdir, + specs_to_remove=specs_to_remove, + prefix_records=link_dists_1, + history_specs=specs, + ) as solver: unlink_dists_2, link_dists_2 = solver.solve_for_diff() assert not link_dists_2 pprint(convert_to_dist_str(unlink_dists_2)) - order = add_subdir_to_iter(( - 'channel-4::conda-build-3.12.1-py37_0', - 'channel-4::conda-4.5.10-py37_0', - 'channel-4::pycosat-0.6.3-py37h14c3975_0', - )) + order = add_subdir_to_iter( + ( + "channel-4::conda-build-3.12.1-py37_0", + "channel-4::conda-4.5.10-py37_0", + "channel-4::pycosat-0.6.3-py37h14c3975_0", + ) + ) for spec in order: assert spec in convert_to_dist_str(unlink_dists_2) def test_priority_1(tmpdir): - with env_var("CONDA_SUBDIR", "linux-64", stack_callback=conda_tests_ctxt_mgmt_def_pol): - specs = MatchSpec("pandas"), MatchSpec("python=2.7"), - with env_var("CONDA_CHANNEL_PRIORITY", "True", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_SUBDIR", "linux-64", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + specs = ( + MatchSpec("pandas"), + MatchSpec("python=2.7"), + ) + with env_var( + "CONDA_CHANNEL_PRIORITY", + "True", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with get_solver_aggregate_1(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-2::mkl-2017.0.3-0', - 'channel-2::openssl-1.0.2l-0', - 'channel-2::readline-6.2-2', - 'channel-2::sqlite-3.13.0-0', - 'channel-2::tk-8.5.18-0', - 'channel-2::zlib-1.2.11-0', - 'channel-2::python-2.7.13-0', - 'channel-2::numpy-1.13.1-py27_0', - 'channel-2::pytz-2017.2-py27_0', - 'channel-2::six-1.10.0-py27_0', - 'channel-2::python-dateutil-2.6.1-py27_0', - 'channel-2::pandas-0.20.3-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-2::mkl-2017.0.3-0", + "channel-2::openssl-1.0.2l-0", + "channel-2::readline-6.2-2", + "channel-2::sqlite-3.13.0-0", + "channel-2::tk-8.5.18-0", + "channel-2::zlib-1.2.11-0", + "channel-2::python-2.7.13-0", + "channel-2::numpy-1.13.1-py27_0", + "channel-2::pytz-2017.2-py27_0", + "channel-2::six-1.10.0-py27_0", + "channel-2::python-dateutil-2.6.1-py27_0", + "channel-2::pandas-0.20.3-py27_0", + ) + ) assert convert_to_dist_str(final_state_1) == order - with env_var("CONDA_CHANNEL_PRIORITY", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol): - with get_solver_aggregate_1(tmpdir, specs, prefix_records=final_state_1, - history_specs=specs) as solver: + with env_var( + "CONDA_CHANNEL_PRIORITY", + "False", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with get_solver_aggregate_1( + tmpdir, specs, prefix_records=final_state_1, history_specs=specs + ) as solver: final_state_2 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_2)) # python and pandas will be updated as they are explicit specs. 
Other stuff may or may not, # as required to satisfy python and pandas - order = add_subdir_to_iter(( - 'channel-4::python-2.7.15-h1571d57_0', - 'channel-4::pandas-0.23.4-py27h04863e7_0', - )) + order = add_subdir_to_iter( + ( + "channel-4::python-2.7.15-h1571d57_0", + "channel-4::pandas-0.23.4-py27h04863e7_0", + ) + ) for spec in order: assert spec in convert_to_dist_str(final_state_2) # channel priority taking effect here. channel-2 should be the channel to draw from. Downgrades expected. # python and pandas will be updated as they are explicit specs. Other stuff may or may not, # as required to satisfy python and pandas - with get_solver_aggregate_1(tmpdir, specs, prefix_records=final_state_2, - history_specs=specs) as solver: + with get_solver_aggregate_1( + tmpdir, specs, prefix_records=final_state_2, history_specs=specs + ) as solver: final_state_3 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_3)) - order = add_subdir_to_iter(( - 'channel-2::python-2.7.13-0', - 'channel-2::pandas-0.20.3-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-2::python-2.7.13-0", + "channel-2::pandas-0.20.3-py27_0", + ) + ) for spec in order: assert spec in convert_to_dist_str(final_state_3) - specs_to_add = MatchSpec("six<1.10"), - specs_to_remove = MatchSpec("pytz"), - with get_solver_aggregate_1(tmpdir, specs_to_add=specs_to_add, specs_to_remove=specs_to_remove, - prefix_records=final_state_3, history_specs=specs) as solver: + specs_to_add = (MatchSpec("six<1.10"),) + specs_to_remove = (MatchSpec("pytz"),) + with get_solver_aggregate_1( + tmpdir, + specs_to_add=specs_to_add, + specs_to_remove=specs_to_remove, + prefix_records=final_state_3, + history_specs=specs, + ) as solver: final_state_4 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_4)) - order = add_subdir_to_iter(( - 'channel-2::python-2.7.13-0', - 'channel-2::six-1.9.0-py27_0', - )) + order = add_subdir_to_iter( + ( + "channel-2::python-2.7.13-0", + "channel-2::six-1.9.0-py27_0", + ) + ) for spec in order: assert spec in convert_to_dist_str(final_state_4) - assert 'pandas' not in convert_to_dist_str(final_state_4) + assert "pandas" not in convert_to_dist_str(final_state_4) def test_features_solve_1(tmpdir): @@ -1980,122 +2430,146 @@ def test_features_solve_1(tmpdir): # and channel-4 is a view of the newer pkgs/main/linux-64 # The channel list, equivalent to context.channels is ('channel-2', 'channel-4') specs = (MatchSpec("python=2.7"), MatchSpec("numpy"), MatchSpec("nomkl")) - with env_var("CONDA_CHANNEL_PRIORITY", "True", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CHANNEL_PRIORITY", "True", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with get_solver_aggregate_1(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-2::nomkl-1.0-0', - 'channel-2::libgfortran-3.0.0-1', - 'channel-2::openssl-1.0.2l-0', - 'channel-2::readline-6.2-2', - 'channel-2::sqlite-3.13.0-0', - 'channel-2::tk-8.5.18-0', - 'channel-2::zlib-1.2.11-0', - 'channel-2::openblas-0.2.19-0', - 'channel-2::python-2.7.13-0', - 'channel-2::numpy-1.13.1-py27_nomkl_0', - )) + order = add_subdir_to_iter( + ( + "channel-2::nomkl-1.0-0", + "channel-2::libgfortran-3.0.0-1", + "channel-2::openssl-1.0.2l-0", + "channel-2::readline-6.2-2", + "channel-2::sqlite-3.13.0-0", + "channel-2::tk-8.5.18-0", + "channel-2::zlib-1.2.11-0", + "channel-2::openblas-0.2.19-0", + "channel-2::python-2.7.13-0", + 
"channel-2::numpy-1.13.1-py27_nomkl_0", + ) + ) assert convert_to_dist_str(final_state_1) == order - with env_var("CONDA_CHANNEL_PRIORITY", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CHANNEL_PRIORITY", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with get_solver_aggregate_1(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-4::blas-1.0-openblas', - 'channel-4::ca-certificates-2018.03.07-0', - 'channel-2::libffi-3.2.1-1', - 'channel-4::libgcc-ng-8.2.0-hdf63c60_0', - 'channel-4::libgfortran-ng-7.2.0-hdf63c60_3', - 'channel-4::libstdcxx-ng-8.2.0-hdf63c60_0', - 'channel-2::zlib-1.2.11-0', - 'channel-4::libopenblas-0.2.20-h9ac9557_7', - 'channel-4::ncurses-6.1-hf484d3e_0', - 'channel-4::nomkl-3.0-0', - 'channel-4::openssl-1.0.2p-h14c3975_0', - 'channel-4::tk-8.6.7-hc745277_3', - 'channel-4::libedit-3.1.20170329-h6b74fdf_2', - 'channel-4::readline-7.0-ha6073c6_4', - 'channel-4::sqlite-3.24.0-h84994c4_0', - 'channel-4::python-2.7.15-h1571d57_0', - 'channel-4::numpy-base-1.15.0-py27h7cdd4dd_0', - 'channel-4::numpy-1.15.0-py27h2aefc1b_0', - )) + order = add_subdir_to_iter( + ( + "channel-4::blas-1.0-openblas", + "channel-4::ca-certificates-2018.03.07-0", + "channel-2::libffi-3.2.1-1", + "channel-4::libgcc-ng-8.2.0-hdf63c60_0", + "channel-4::libgfortran-ng-7.2.0-hdf63c60_3", + "channel-4::libstdcxx-ng-8.2.0-hdf63c60_0", + "channel-2::zlib-1.2.11-0", + "channel-4::libopenblas-0.2.20-h9ac9557_7", + "channel-4::ncurses-6.1-hf484d3e_0", + "channel-4::nomkl-3.0-0", + "channel-4::openssl-1.0.2p-h14c3975_0", + "channel-4::tk-8.6.7-hc745277_3", + "channel-4::libedit-3.1.20170329-h6b74fdf_2", + "channel-4::readline-7.0-ha6073c6_4", + "channel-4::sqlite-3.24.0-h84994c4_0", + "channel-4::python-2.7.15-h1571d57_0", + "channel-4::numpy-base-1.15.0-py27h7cdd4dd_0", + "channel-4::numpy-1.15.0-py27h2aefc1b_0", + ) + ) assert convert_to_dist_str(final_state_1) == order @pytest.mark.integration # this test is slower, so we'll lump it into integration def test_freeze_deps_1(tmpdir): - specs = MatchSpec("six=1.7"), + specs = (MatchSpec("six=1.7"),) with get_solver_2(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-2::openssl-1.0.2l-0', - 'channel-2::readline-6.2-2', - 'channel-2::sqlite-3.13.0-0', - 'channel-2::tk-8.5.18-0', - 'channel-2::xz-5.2.3-0', - 'channel-2::zlib-1.2.11-0', - 'channel-2::python-3.4.5-0', - 'channel-2::six-1.7.3-py34_0', - )) + order = add_subdir_to_iter( + ( + "channel-2::openssl-1.0.2l-0", + "channel-2::readline-6.2-2", + "channel-2::sqlite-3.13.0-0", + "channel-2::tk-8.5.18-0", + "channel-2::xz-5.2.3-0", + "channel-2::zlib-1.2.11-0", + "channel-2::python-3.4.5-0", + "channel-2::six-1.7.3-py34_0", + ) + ) assert convert_to_dist_str(final_state_1) == order - specs_to_add = MatchSpec("bokeh"), - with get_solver_2(tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: + specs_to_add = (MatchSpec("bokeh"),) + with get_solver_2( + tmpdir, specs_to_add, prefix_records=final_state_1, history_specs=specs + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) unlink_order = () - link_order = add_subdir_to_iter(( - 'channel-2::mkl-2017.0.3-0', - 'channel-2::yaml-0.1.6-0', - 'channel-2::backports_abc-0.5-py34_0', - 
'channel-2::markupsafe-1.0-py34_0', - 'channel-2::numpy-1.13.0-py34_0', - 'channel-2::pyyaml-3.12-py34_0', - 'channel-2::requests-2.14.2-py34_0', - 'channel-2::setuptools-27.2.0-py34_0', - 'channel-2::jinja2-2.9.6-py34_0', - 'channel-2::python-dateutil-2.6.1-py34_0', - 'channel-2::tornado-4.4.2-py34_0', - 'channel-2::bokeh-0.12.4-py34_0', - )) + link_order = add_subdir_to_iter( + ( + "channel-2::mkl-2017.0.3-0", + "channel-2::yaml-0.1.6-0", + "channel-2::backports_abc-0.5-py34_0", + "channel-2::markupsafe-1.0-py34_0", + "channel-2::numpy-1.13.0-py34_0", + "channel-2::pyyaml-3.12-py34_0", + "channel-2::requests-2.14.2-py34_0", + "channel-2::setuptools-27.2.0-py34_0", + "channel-2::jinja2-2.9.6-py34_0", + "channel-2::python-dateutil-2.6.1-py34_0", + "channel-2::tornado-4.4.2-py34_0", + "channel-2::bokeh-0.12.4-py34_0", + ) + ) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order # now we can't install the latest bokeh 0.12.5, but instead we get bokeh 0.12.4 - specs_to_add = MatchSpec("bokeh"), - with get_solver_2(tmpdir, specs_to_add, prefix_records=final_state_1, - history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4"))) as solver: + specs_to_add = (MatchSpec("bokeh"),) + with get_solver_2( + tmpdir, + specs_to_add, + prefix_records=final_state_1, + history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4")), + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) unlink_order = () - link_order = add_subdir_to_iter(( - 'channel-2::mkl-2017.0.3-0', - 'channel-2::yaml-0.1.6-0', - 'channel-2::backports_abc-0.5-py34_0', - 'channel-2::markupsafe-1.0-py34_0', - 'channel-2::numpy-1.13.0-py34_0', - 'channel-2::pyyaml-3.12-py34_0', - 'channel-2::requests-2.14.2-py34_0', - 'channel-2::setuptools-27.2.0-py34_0', - 'channel-2::jinja2-2.9.6-py34_0', - 'channel-2::python-dateutil-2.6.1-py34_0', - 'channel-2::tornado-4.4.2-py34_0', - 'channel-2::bokeh-0.12.4-py34_0', - )) + link_order = add_subdir_to_iter( + ( + "channel-2::mkl-2017.0.3-0", + "channel-2::yaml-0.1.6-0", + "channel-2::backports_abc-0.5-py34_0", + "channel-2::markupsafe-1.0-py34_0", + "channel-2::numpy-1.13.0-py34_0", + "channel-2::pyyaml-3.12-py34_0", + "channel-2::requests-2.14.2-py34_0", + "channel-2::setuptools-27.2.0-py34_0", + "channel-2::jinja2-2.9.6-py34_0", + "channel-2::python-dateutil-2.6.1-py34_0", + "channel-2::tornado-4.4.2-py34_0", + "channel-2::bokeh-0.12.4-py34_0", + ) + ) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order # here, the python=3.4 spec can't be satisfied, so it's dropped, and we go back to py27 with pytest.raises(UnsatisfiableError): - specs_to_add = MatchSpec("bokeh=0.12.5"), - with get_solver_2(tmpdir, specs_to_add, prefix_records=final_state_1, - history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4"))) as solver: + specs_to_add = (MatchSpec("bokeh=0.12.5"),) + with get_solver_2( + tmpdir, + specs_to_add, + prefix_records=final_state_1, + history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4")), + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() # adding the explicit python spec allows conda to change the python versions. @@ -2103,44 +2577,56 @@ def test_freeze_deps_1(tmpdir): # explicit "six=1.7" request in the history. It should only neuter that spec if there's no way # to solve it with that spec. 
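# A rough sketch of what "neutering" a history spec amounts to (simplified; the
# actual logic lives in conda's Solver, and this helper is hypothetical):
#
#     def neuter(spec: MatchSpec) -> MatchSpec:
#         # drop the version/build constraints, keeping only the package name
#         return MatchSpec(spec.name)
#
#     neuter(MatchSpec("six=1.7"))  # -> MatchSpec("six"); any six version may now be chosen
#
# In the solve below, the explicit "python" spec lets python move off 3.4, and the
# six=1.7 history spec can still be honored (note six-1.7.3-py27_0 in the link order).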
specs_to_add = MatchSpec("bokeh=0.12.5"), MatchSpec("python") - with get_solver_2(tmpdir, specs_to_add, prefix_records=final_state_1, - history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4"))) as solver: + with get_solver_2( + tmpdir, + specs_to_add, + prefix_records=final_state_1, + history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4")), + ) as solver: unlink_precs, link_precs = solver.solve_for_diff() pprint(convert_to_dist_str(unlink_precs)) pprint(convert_to_dist_str(link_precs)) - unlink_order = add_subdir_to_iter(( - 'channel-2::six-1.7.3-py34_0', - 'channel-2::python-3.4.5-0', - 'channel-2::xz-5.2.3-0', - )) - link_order = add_subdir_to_iter(( - 'channel-2::mkl-2017.0.3-0', - 'channel-2::yaml-0.1.6-0', - 'channel-2::python-2.7.13-0', - 'channel-2::backports-1.0-py27_0', - 'channel-2::backports_abc-0.5-py27_0', - 'channel-2::certifi-2016.2.28-py27_0', - 'channel-2::futures-3.1.1-py27_0', - 'channel-2::markupsafe-1.0-py27_0', - 'channel-2::numpy-1.13.1-py27_0', - 'channel-2::pyyaml-3.12-py27_0', - 'channel-2::requests-2.14.2-py27_0', - 'channel-2::six-1.7.3-py27_0', - 'channel-2::python-dateutil-2.6.1-py27_0', - 'channel-2::setuptools-36.4.0-py27_1', - 'channel-2::singledispatch-3.4.0.3-py27_0', - 'channel-2::ssl_match_hostname-3.5.0.1-py27_0', - 'channel-2::jinja2-2.9.6-py27_0', - 'channel-2::tornado-4.5.2-py27_0', - 'channel-2::bokeh-0.12.5-py27_1', - )) + unlink_order = add_subdir_to_iter( + ( + "channel-2::six-1.7.3-py34_0", + "channel-2::python-3.4.5-0", + "channel-2::xz-5.2.3-0", + ) + ) + link_order = add_subdir_to_iter( + ( + "channel-2::mkl-2017.0.3-0", + "channel-2::yaml-0.1.6-0", + "channel-2::python-2.7.13-0", + "channel-2::backports-1.0-py27_0", + "channel-2::backports_abc-0.5-py27_0", + "channel-2::certifi-2016.2.28-py27_0", + "channel-2::futures-3.1.1-py27_0", + "channel-2::markupsafe-1.0-py27_0", + "channel-2::numpy-1.13.1-py27_0", + "channel-2::pyyaml-3.12-py27_0", + "channel-2::requests-2.14.2-py27_0", + "channel-2::six-1.7.3-py27_0", + "channel-2::python-dateutil-2.6.1-py27_0", + "channel-2::setuptools-36.4.0-py27_1", + "channel-2::singledispatch-3.4.0.3-py27_0", + "channel-2::ssl_match_hostname-3.5.0.1-py27_0", + "channel-2::jinja2-2.9.6-py27_0", + "channel-2::tornado-4.5.2-py27_0", + "channel-2::bokeh-0.12.5-py27_1", + ) + ) assert convert_to_dist_str(unlink_precs) == unlink_order assert convert_to_dist_str(link_precs) == link_order # here, the python=3.4 spec can't be satisfied, so it's dropped, and we go back to py27 - specs_to_add = MatchSpec("bokeh=0.12.5"), - with get_solver_2(tmpdir, specs_to_add, prefix_records=final_state_1, - history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4"))) as solver: + specs_to_add = (MatchSpec("bokeh=0.12.5"),) + with get_solver_2( + tmpdir, + specs_to_add, + prefix_records=final_state_1, + history_specs=(MatchSpec("six=1.7"), MatchSpec("python=3.4")), + ) as solver: with pytest.raises(UnsatisfiableError): solver.solve_final_state(update_modifier=UpdateModifier.FREEZE_INSTALLED) @@ -2173,64 +2659,69 @@ def test_freeze_deps_1(tmpdir): # reset_context() - # @patch.object(Context, 'prefix_specified') - # def test_simple_install_uninstall(self, prefix_specified): - # prefix_specified.__get__ = Mock(return_value=False) - # - # specs = MatchSpec("spiffy-test-app"), - # with get_solver_3(specs) as solver: - # final_state_1 = solver.solve_final_state() - # # PrefixDag(final_state_1, specs).open_url() - # print(convert_to_dist_str(final_state_1)) - # order = ( - # 'channel-1::openssl-1.0.2l-0', - # 
'channel-1::readline-6.2-2', - # 'channel-1::sqlite-3.13.0-0', - # 'channel-1::tk-8.5.18-0', - # 'channel-1::zlib-1.2.8-3', - # 'channel-1::python-2.7.13-0', - # 'channel-1::spiffy-test-app-2.0-py27hf99fac9_0', - # ) - # assert tuple(final_state_1) == tuple(solver._index[Dist(d)] for d in order) - # - # specs_to_add = MatchSpec("uses-spiffy-test-app"), - # with get_solver_3(specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: - # final_state_2 = solver.solve_final_state() - # # PrefixDag(final_state_2, specs).open_url() - # print(convert_to_dist_str(final_state_2)) - # order = ( - # - # ) - # assert tuple(final_state_2) == tuple(solver._index[Dist(d)] for d in order) - # - # specs = specs + specs_to_add - # specs_to_remove = MatchSpec("uses-spiffy-test-app"), - # with get_solver_3(specs_to_remove=specs_to_remove, prefix_records=final_state_2, - # history_specs=specs) as solver: - # final_state_3 = solver.solve_final_state() - # # PrefixDag(final_state_2, specs).open_url() - # print(convert_to_dist_str(final_state_3)) - # order = ( - # - # ) - # assert tuple(final_state_3) == tuple(solver._index[Dist(d)] for d in order) +# @patch.object(Context, 'prefix_specified') +# def test_simple_install_uninstall(self, prefix_specified): +# prefix_specified.__get__ = Mock(return_value=False) +# +# specs = MatchSpec("spiffy-test-app"), +# with get_solver_3(specs) as solver: +# final_state_1 = solver.solve_final_state() +# # PrefixDag(final_state_1, specs).open_url() +# print(convert_to_dist_str(final_state_1)) +# order = ( +# 'channel-1::openssl-1.0.2l-0', +# 'channel-1::readline-6.2-2', +# 'channel-1::sqlite-3.13.0-0', +# 'channel-1::tk-8.5.18-0', +# 'channel-1::zlib-1.2.8-3', +# 'channel-1::python-2.7.13-0', +# 'channel-1::spiffy-test-app-2.0-py27hf99fac9_0', +# ) +# assert tuple(final_state_1) == tuple(solver._index[Dist(d)] for d in order) +# +# specs_to_add = MatchSpec("uses-spiffy-test-app"), +# with get_solver_3(specs_to_add, prefix_records=final_state_1, history_specs=specs) as solver: +# final_state_2 = solver.solve_final_state() +# # PrefixDag(final_state_2, specs).open_url() +# print(convert_to_dist_str(final_state_2)) +# order = ( +# +# ) +# assert tuple(final_state_2) == tuple(solver._index[Dist(d)] for d in order) +# +# specs = specs + specs_to_add +# specs_to_remove = MatchSpec("uses-spiffy-test-app"), +# with get_solver_3(specs_to_remove=specs_to_remove, prefix_records=final_state_2, +# history_specs=specs) as solver: +# final_state_3 = solver.solve_final_state() +# # PrefixDag(final_state_2, specs).open_url() +# print(convert_to_dist_str(final_state_3)) +# order = ( +# +# ) +# assert tuple(final_state_3) == tuple(solver._index[Dist(d)] for d in order) def test_current_repodata_usage(tmpdir): # force this to False, because otherwise tests fail when run with old conda-build - with env_var('CONDA_USE_ONLY_TAR_BZ2', False, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_USE_ONLY_TAR_BZ2", False, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): solver = context.plugin_manager.get_cached_solver_backend()( - tmpdir.strpath, (Channel(CHANNEL_DIR),), ('win-64',), - specs_to_add=[MatchSpec('zlib')], repodata_fn='current_repodata.json' + tmpdir.strpath, + (Channel(CHANNEL_DIR),), + ("win-64",), + specs_to_add=[MatchSpec("zlib")], + repodata_fn="current_repodata.json", ) final_state = solver.solve_final_state() # zlib 1.2.11, vc 14.1, vs2015_runtime, virtual package for vc track_feature assert final_state checked = False for prec in final_state: - if 
prec.name == 'zlib': - assert prec.version == '1.2.11' - assert prec.fn.endswith('.conda') + if prec.name == "zlib": + assert prec.version == "1.2.11" + assert prec.fn.endswith(".conda") checked = True if not checked: raise ValueError("Didn't have expected state in solve (needed zlib record)") @@ -2238,74 +2729,123 @@ def test_current_repodata_usage(tmpdir): def test_current_repodata_fallback(tmpdir): solver = context.plugin_manager.get_cached_solver_backend()( - tmpdir.strpath, (Channel(CHANNEL_DIR),), ('win-64',), - specs_to_add=[MatchSpec('zlib=1.2.8')] + tmpdir.strpath, + (Channel(CHANNEL_DIR),), + ("win-64",), + specs_to_add=[MatchSpec("zlib=1.2.8")], ) final_state = solver.solve_final_state() # zlib 1.2.11, zlib 1.2.8, vc 14.1, vs2015_runtime, virtual package for vc track_feature assert final_state checked = False for prec in final_state: - if prec.name == 'zlib': - assert prec.version == '1.2.8' - assert prec.fn.endswith('.tar.bz2') + if prec.name == "zlib": + assert prec.version == "1.2.8" + assert prec.fn.endswith(".tar.bz2") checked = True if not checked: raise ValueError("Didn't have expected state in solve (needed zlib record)") def test_downgrade_python_prevented_with_sane_message(tmpdir): - specs = MatchSpec("python=2.6"), + specs = (MatchSpec("python=2.6"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() # PrefixDag(final_state_1, specs).open_url() pprint(convert_to_dist_str(final_state_1)) - order = add_subdir_to_iter(( - 'channel-1::openssl-1.0.1c-0', - 'channel-1::readline-6.2-0', - 'channel-1::sqlite-3.7.13-0', - 'channel-1::system-5.8-1', - 'channel-1::tk-8.5.13-0', - 'channel-1::zlib-1.2.7-0', - 'channel-1::python-2.6.8-6', - )) + order = add_subdir_to_iter( + ( + "channel-1::openssl-1.0.1c-0", + "channel-1::readline-6.2-0", + "channel-1::sqlite-3.7.13-0", + "channel-1::system-5.8-1", + "channel-1::tk-8.5.13-0", + "channel-1::zlib-1.2.7-0", + "channel-1::python-2.6.8-6", + ) + ) assert convert_to_dist_str(final_state_1) == order # incompatible CLI and configured specs - specs_to_add = MatchSpec("scikit-learn==0.13"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_add = (MatchSpec("scikit-learn==0.13"),) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: with pytest.raises(UnsatisfiableError) as exc: solver.solve_final_state() error_msg = str(exc.value).strip() - assert "incompatible with the existing python installation in your environment:" in error_msg + assert ( + "incompatible with the existing python installation in your environment:" + in error_msg + ) assert "- scikit-learn==0.13 -> python=2.7" in error_msg assert "Your python: python=2.6" in error_msg - specs_to_add = MatchSpec("unsatisfiable-with-py26"), - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + specs_to_add = (MatchSpec("unsatisfiable-with-py26"),) + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: with pytest.raises(UnsatisfiableError) as exc: solver.solve_final_state() error_msg = str(exc.value).strip() - assert "incompatible with the existing python installation in your environment:" in error_msg + assert ( + "incompatible with the existing python installation in your environment:" + in error_msg + ) assert "- unsatisfiable-with-py26 -> python=2.7" in error_msg 
assert "Your python: python=2.6" + fake_index = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.1", channel="test", subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.1", + channel="test", + subdir="conda-test", + fn="mypkg-0.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.0", channel="test", subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.0", + channel="test", + subdir="conda-test", + fn="mypkg-0.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkgnot", version="1.1.1", channel="test", subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg 0.1.0'], constrains=[] - ) + package_type=PackageType.NOARCH_GENERIC, + name="mypkgnot", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg 0.1.0"], + constrains=[], + ), ] @@ -2313,9 +2853,12 @@ def test_packages_in_solution_change_already_newest(tmpdir): specs = MatchSpec("mypkg") pre_packages = {"mypkg": [("mypkg", "0.1.1")]} post_packages = {"mypkg": [("mypkg", "0.1.1")]} - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[specs]) - constrained = solver.get_constrained_packages(pre_packages, post_packages, fake_index) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[specs] + ) + constrained = solver.get_constrained_packages( + pre_packages, post_packages, fake_index + ) assert len(constrained) == 0 @@ -2323,9 +2866,12 @@ def test_packages_in_solution_change_needs_update(tmpdir): specs = MatchSpec("mypkg") pre_packages = {"mypkg": [("mypkg", "0.1.0")]} post_packages = {"mypkg": [("mypkg", "0.1.1")]} - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[specs]) - constrained = solver.get_constrained_packages(pre_packages, post_packages, fake_index) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[specs] + ) + constrained = solver.get_constrained_packages( + pre_packages, post_packages, fake_index + ) assert len(constrained) == 0 @@ -2333,28 +2879,50 @@ def test_packages_in_solution_change_constrained(tmpdir): specs = MatchSpec("mypkg") pre_packages = {"mypkg": [("mypkg", "0.1.0")]} post_packages = {"mypkg": [("mypkg", "0.1.0")]} - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[specs]) - constrained = solver.get_constrained_packages(pre_packages, post_packages, fake_index) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[specs] + ) + constrained = solver.get_constrained_packages( + pre_packages, post_packages, fake_index 
+ ) assert len(constrained) == 1 def test_determine_constricting_specs_conflicts(tmpdir): solution_prec = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.0", channel="test", subdir="conda-test", + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.0", + channel="test", + subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkgnot", version="1.1.1", channel="test", - subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg 0.1.0'], constrains=[] - ) + package_type=PackageType.NOARCH_GENERIC, + name="mypkgnot", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg 0.1.0"], + constrains=[], + ), ] spec = MatchSpec("mypkg") - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[spec]) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[spec] + ) constricting = solver.determine_constricting_specs(spec, solution_prec) assert any(i for i in constricting if i[0] == "mypkgnot") @@ -2362,19 +2930,38 @@ def test_determine_constricting_specs_conflicts(tmpdir): def test_determine_constricting_specs_conflicts_upperbound(tmpdir): solution_prec = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.1", channel="test", subdir="conda-test", + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.1", + channel="test", + subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkgnot", version="1.1.1", channel="test", - subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg <=0.1.1'], constrains=[] - ) + package_type=PackageType.NOARCH_GENERIC, + name="mypkgnot", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg <=0.1.1"], + constrains=[], + ), ] spec = MatchSpec("mypkg") - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[spec]) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[spec] + ) constricting = solver.determine_constricting_specs(spec, solution_prec) assert any(i for i in constricting if i[0] == "mypkgnot") @@ -2382,24 +2969,52 @@ def test_determine_constricting_specs_conflicts_upperbound(tmpdir): def test_determine_constricting_specs_multi_conflicts(tmpdir): solution_prec = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.1", channel="test", subdir="conda-test", + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.1", + channel="test", + subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, 
paths_data=None, files=None, depends=[], constrains=[] + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkgnot", version="1.1.1", channel="test", - subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg <=0.1.1'], constrains=[] + package_type=PackageType.NOARCH_GENERIC, + name="mypkgnot", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg <=0.1.1"], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="notmypkg", version="1.1.1", channel="test", - subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg 0.1.1'], constrains=[] - ) + package_type=PackageType.NOARCH_GENERIC, + name="notmypkg", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg 0.1.1"], + constrains=[], + ), ] spec = MatchSpec("mypkg") - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[spec]) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[spec] + ) constricting = solver.determine_constricting_specs(spec, solution_prec) assert any(i for i in constricting if i[0] == "mypkgnot") assert any(i for i in constricting if i[0] == "notmypkg") @@ -2408,19 +3023,38 @@ def test_determine_constricting_specs_multi_conflicts(tmpdir): def test_determine_constricting_specs_no_conflicts_upperbound_compound_depends(tmpdir): solution_prec = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.1", channel="test", subdir="conda-test", + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.1", + channel="test", + subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkgnot", version="1.1.1", channel="test", - subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg >=0.1.1,<0.2.1'], constrains=[] - ) + package_type=PackageType.NOARCH_GENERIC, + name="mypkgnot", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg >=0.1.1,<0.2.1"], + constrains=[], + ), ] spec = MatchSpec("mypkg") - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[spec]) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[spec] + ) constricting = solver.determine_constricting_specs(spec, solution_prec) assert constricting is None @@ -2428,19 +3062,38 @@ def test_determine_constricting_specs_no_conflicts_upperbound_compound_depends(t def test_determine_constricting_specs_no_conflicts_version_star(tmpdir): solution_prec = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.1", 
channel="test", subdir="conda-test", + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.1", + channel="test", + subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkgnot", version="1.1.1", channel="test", - subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg 0.1.*'], constrains=[] - ) + package_type=PackageType.NOARCH_GENERIC, + name="mypkgnot", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg 0.1.*"], + constrains=[], + ), ] spec = MatchSpec("mypkg") - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[spec]) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[spec] + ) constricting = solver.determine_constricting_specs(spec, solution_prec) assert constricting is None @@ -2448,14 +3101,24 @@ def test_determine_constricting_specs_no_conflicts_version_star(tmpdir): def test_determine_constricting_specs_no_conflicts_free(tmpdir): solution_prec = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.1", channel="test", subdir="conda-test", + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.1", + channel="test", + subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), ] spec = MatchSpec("mypkg") - solver = context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[spec]) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[spec] + ) constricting = solver.determine_constricting_specs(spec, solution_prec) assert constricting is None @@ -2463,19 +3126,38 @@ def test_determine_constricting_specs_no_conflicts_free(tmpdir): def test_determine_constricting_specs_no_conflicts_no_upperbound(tmpdir): solution_prec = [ PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkg", version="0.1.1", channel="test", subdir="conda-test", + package_type=PackageType.NOARCH_GENERIC, + name="mypkg", + version="0.1.1", + channel="test", + subdir="conda-test", fn="mypkg-0.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=[], constrains=[] + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=[], + constrains=[], ), PrefixRecord( - package_type=PackageType.NOARCH_GENERIC, name="mypkgnot", version="1.1.1", channel="test", - subdir="conda-test", fn="mypkgnot-1.1.1", - build="pypi_0", build_number=1, paths_data=None, files=None, depends=['mypkg >=0.0.5'], constrains=[] - ) + package_type=PackageType.NOARCH_GENERIC, + name="mypkgnot", + version="1.1.1", + channel="test", + subdir="conda-test", + fn="mypkgnot-1.1.1", + build="pypi_0", + build_number=1, + paths_data=None, + files=None, + depends=["mypkg >=0.0.5"], + constrains=[], + ), ] spec = MatchSpec("mypkg") - solver = 
context.plugin_manager.get_cached_solver_backend()(tmpdir, (Channel(CHANNEL_DIR),), ('linux-64',), - specs_to_add=[spec]) + solver = context.plugin_manager.get_cached_solver_backend()( + tmpdir, (Channel(CHANNEL_DIR),), ("linux-64",), specs_to_add=[spec] + ) constricting = solver.determine_constricting_specs(spec, solution_prec) assert constricting is None @@ -2493,7 +3175,7 @@ def test_indirect_dep_optimized_by_version_over_package_count(tmpdir): version of that new metapackage has fewer deps (but newer version). We want it to prefer the newer version. """ - specs = MatchSpec("anaconda=1.4"), + specs = (MatchSpec("anaconda=1.4"),) with get_solver(tmpdir, specs) as solver: final_state_1 = solver.solve_final_state() @@ -2506,8 +3188,12 @@ def test_indirect_dep_optimized_by_version_over_package_count(tmpdir): # This does NOT work if you omit the anaconda matchspec here. It is part of the history, # and it must be supplied as an explicit spec to override that history. specs_to_add = MatchSpec("zeromq"), MatchSpec("anaconda") - with get_solver(tmpdir, specs_to_add=specs_to_add, prefix_records=final_state_1, - history_specs=specs) as solver: + with get_solver( + tmpdir, + specs_to_add=specs_to_add, + prefix_records=final_state_1, + history_specs=specs, + ) as solver: final_state = solver.solve_final_state() # anaconda, _dummy_anaconda_impl, zeromq. NOT bzip2 @@ -2515,9 +3201,9 @@ def test_indirect_dep_optimized_by_version_over_package_count(tmpdir): # bzip2 is part of the older _dummy_anaconda_impl for prec in final_state: - if prec.name == 'anaconda': - assert prec.version == '1.5.0' - elif prec.name == 'zeromq': + if prec.name == "anaconda": + assert prec.version == "1.5.0" + elif prec.name == "zeromq": assert prec.build_number == 1 - elif prec.name == '_dummy_anaconda_impl': + elif prec.name == "_dummy_anaconda_impl": assert prec.version == "2.0" diff --git a/tests/core/test_subdir_data.py b/tests/core/test_subdir_data.py index 7f210c6fbf0..f1c2f8140b6 100644 --- a/tests/core/test_subdir_data.py +++ b/tests/core/test_subdir_data.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger from os.path import join from time import sleep @@ -24,7 +23,11 @@ from conda.exports import url_path from conda.gateways.connection import SSLError from conda.gateways.connection.session import CondaSession -from conda.gateways.repodata import CondaRepoInterface, RepodataCache, Response304ContentUnchanged +from conda.gateways.repodata import ( + CondaRepoInterface, + RepodataCache, + Response304ContentUnchanged, +) from conda.models.channel import Channel from conda.models.records import PackageRecord from conda.testing.helpers import CHANNEL_DIR @@ -34,7 +37,9 @@ # some test dependencies are unavailable on newer platforms OVERRIDE_PLATFORM = ( - "linux-64" if context.subdir not in ("win-64", "linux-64", "osx-64") else context.subdir + "linux-64" + if context.subdir not in ("win-64", "linux-64", "osx-64") + else context.subdir ) @@ -55,7 +60,9 @@ def test_get_index_no_platform_with_offline_cache(self, platform=OVERRIDE_PLATFO {"CONDA_REPODATA_TIMEOUT_SECS": "0", "CONDA_PLATFORM": platform}, stack_callback=conda_tests_ctxt_mgmt_def_pol, ): - with patch.object(conda.core.subdir_data, "read_mod_and_etag") as read_mod_and_etag: + with patch.object( + conda.core.subdir_data, "read_mod_and_etag" + ) as read_mod_and_etag: read_mod_and_etag.return_value = {} channel_urls = ("https://repo.anaconda.com/pkgs/pro",) @@ -75,13 +82,17 @@ def
test_get_index_no_platform_with_offline_cache(self, platform=OVERRIDE_PLATFO # supplement_index_from_cache on CI? for unknown in (None, False, True): - with env_var("CONDA_OFFLINE", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_OFFLINE", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): # note `fetch_repodata_remote_request` will no longer be called # by conda code, and is only there for backwards compatibility. with patch.object( conda.core.subdir_data, "fetch_repodata_remote_request" ) as remote_request: - index2 = get_index(channel_urls=channel_urls, prepend=False, unknown=unknown) + index2 = get_index( + channel_urls=channel_urls, prepend=False, unknown=unknown + ) assert all(index2.get(k) == rec for k, rec in index.items()) assert unknown is not False or len(index) == len(index2) assert remote_request.call_count == 0 @@ -95,7 +106,9 @@ def test_get_index_no_platform_with_offline_cache(self, platform=OVERRIDE_PLATFO conda.core.subdir_data, "fetch_repodata_remote_request" ) as remote_request: remote_request.side_effect = Response304ContentUnchanged() - index3 = get_index(channel_urls=channel_urls, prepend=False, unknown=unknown) + index3 = get_index( + channel_urls=channel_urls, prepend=False, unknown=unknown + ) assert all(index3.get(k) == rec for k, rec in index.items()) assert unknown or len(index) == len(index3) @@ -114,7 +127,8 @@ def test_get_index_no_platform_with_offline_cache(self, platform=OVERRIDE_PLATFO # test load from cache with env_vars( - {"CONDA_USE_INDEX_CACHE": "true"}, stack_callback=conda_tests_ctxt_mgmt_def_pol + {"CONDA_USE_INDEX_CACHE": "true"}, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): sd._load() @@ -260,12 +274,16 @@ def test_subdir_data_prefers_conda_to_tar_bz2(platform=OVERRIDE_PLATFORM): def test_use_only_tar_bz2(platform=OVERRIDE_PLATFORM): channel = Channel(join(CHANNEL_DIR, platform)) SubdirData.clear_cached_local_channel_data() - with env_var("CONDA_USE_ONLY_TAR_BZ2", True, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_USE_ONLY_TAR_BZ2", True, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): sd = SubdirData(channel) precs = tuple(sd.query("zlib")) assert precs[0].fn.endswith(".tar.bz2") SubdirData.clear_cached_local_channel_data() - with env_var("CONDA_USE_ONLY_TAR_BZ2", False, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_USE_ONLY_TAR_BZ2", False, stack_callback=conda_tests_ctxt_mgmt_def_pol + ): sd = SubdirData(channel) precs = tuple(sd.query("zlib")) assert precs[0].fn.endswith(".conda") diff --git a/tests/data/build-index2-json.py b/tests/data/build-index2-json.py index 6925ce5e97e..234b588fee2 100644 --- a/tests/data/build-index2-json.py +++ b/tests/data/build-index2-json.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json from os.path import abspath, dirname, join @@ -8,11 +7,18 @@ DATA_DIR = abspath(join(dirname(__file__), "repodata")) + def save_data_source(url, name): raw_repodata_str = fetch_repodata_remote_request(url, None, None) json.loads(raw_repodata_str) - with open(join(DATA_DIR, name + ".json"), 'w') as fh: - json.dump(json.loads(raw_repodata_str), fh, indent=2, sort_keys=True, separators=(',', ': ')) + with open(join(DATA_DIR, name + ".json"), "w") as fh: + json.dump( + json.loads(raw_repodata_str), + fh, + indent=2, + sort_keys=True, + separators=(",", ": "), + ) def read_data_source(name): @@ -25,153 +31,150 @@ def main(): r2json = read_data_source("conda-test_noarch") r3json = 
read_data_source("conda-test_linux-64") - packages = r3json['packages'].copy() - packages.update(r1json['packages']) - packages.update(r2json['packages']) - - + packages = r3json["packages"].copy() + packages.update(r1json["packages"]) + packages.update(r2json["packages"]) keep_list = ( - 'asn1crypto', - 'astroid', - 'backports', - 'backports_abc', - 'bkcharts', - 'bokeh', - 'boto3', - 'botocore', - 'certifi', - 'cffi', - 'chest', - 'click', - 'cloog', - 'cloudpickle', - 'colorama', - 'conda', - 'conda-env', - 'cryptography', - 'dask', - 'dateutil', - 'decorator', - 'dill', - 'distribute', - 'distributed', - 'docutils', - 'enum34', - 'flask', - 'funcsigs', - 'futures', - 'get_terminal_size', - 'gevent', - 'gevent-websocket', - 'gmp', - 'greenlet', - 'heapdict', - 'idna', - 'ipaddress', - 'ipython', - 'ipython_genutils', - 'isl', - 'itsdangerous', - 'jedi', - 'jinja2', - 'jmespath', - 'lazy-object-proxy', - 'libevent', - 'libffi', - 'libgcc', - 'libgfortran', - 'libsodium', - 'llvm', - 'llvmlite', - 'llvmmath', - 'llvmpy', - 'locket', - 'logilab-common', - 'lz4', - 'markupsafe', - 'meta', - 'mkl', - 'mpc', - 'mpfr', - 'msgpack-python', - 'needs-spiffy-test-app', - 'nomkl', - 'nose', - 'numpy', - 'openblas', - 'openssl', - 'ordereddict', - 'packaging', - 'pandas', - 'partd', - 'path.py', - 'pathlib2', - 'pexpect', - 'pickleshare', - 'pip', - 'prompt_toolkit', - 'psutil', - 'ptyprocess', - 'pyasn1', - 'pycosat', - 'pycparser', - 'pygments', - 'pyopenssl', - 'pyparsing', - 'python', - 'python-dateutil', - 'pytz', - 'pyyaml', - 'pyzmq', - 'readline', - 'redis', - 'redis-py', - 'requests', - 'ruamel_yaml', - 's3fs', - 's3transfer', - 'scandir', - 'scipy', - 'setuptools', - 'simplegeneric', - 'singledispatch', - 'six', - 'sortedcollections', - 'sortedcontainers', - 'spiffy-test-app', - 'sqlite', - 'ssl_match_hostname', - 'system', - 'tblib', - 'tk', - 'toolz', - 'tornado', - 'traitlets', - 'ujson', - 'uses-spiffy-test-app', - 'util-linux', - 'wcwidth', - 'werkzeug', - 'wheel', - 'wrapt', - 'xz', - 'yaml', - 'zeromq', - 'zict', - 'zlib', - - 'system', - 'functools_lru_cache', + "asn1crypto", + "astroid", + "backports", + "backports_abc", + "bkcharts", + "bokeh", + "boto3", + "botocore", + "certifi", + "cffi", + "chest", + "click", + "cloog", + "cloudpickle", + "colorama", + "conda", + "conda-env", + "cryptography", + "dask", + "dateutil", + "decorator", + "dill", + "distribute", + "distributed", + "docutils", + "enum34", + "flask", + "funcsigs", + "futures", + "get_terminal_size", + "gevent", + "gevent-websocket", + "gmp", + "greenlet", + "heapdict", + "idna", + "ipaddress", + "ipython", + "ipython_genutils", + "isl", + "itsdangerous", + "jedi", + "jinja2", + "jmespath", + "lazy-object-proxy", + "libevent", + "libffi", + "libgcc", + "libgfortran", + "libsodium", + "llvm", + "llvmlite", + "llvmmath", + "llvmpy", + "locket", + "logilab-common", + "lz4", + "markupsafe", + "meta", + "mkl", + "mpc", + "mpfr", + "msgpack-python", + "needs-spiffy-test-app", + "nomkl", + "nose", + "numpy", + "openblas", + "openssl", + "ordereddict", + "packaging", + "pandas", + "partd", + "path.py", + "pathlib2", + "pexpect", + "pickleshare", + "pip", + "prompt_toolkit", + "psutil", + "ptyprocess", + "pyasn1", + "pycosat", + "pycparser", + "pygments", + "pyopenssl", + "pyparsing", + "python", + "python-dateutil", + "pytz", + "pyyaml", + "pyzmq", + "readline", + "redis", + "redis-py", + "requests", + "ruamel_yaml", + "s3fs", + "s3transfer", + "scandir", + "scipy", + "setuptools", + "simplegeneric", + "singledispatch", + 
"six", + "sortedcollections", + "sortedcontainers", + "spiffy-test-app", + "sqlite", + "ssl_match_hostname", + "system", + "tblib", + "tk", + "toolz", + "tornado", + "traitlets", + "ujson", + "uses-spiffy-test-app", + "util-linux", + "wcwidth", + "werkzeug", + "wheel", + "wrapt", + "xz", + "yaml", + "zeromq", + "zict", + "zlib", + "system", + "functools_lru_cache", ) keep = {} missing_in_allowlist = set() for fn, info in packages.items(): - if info['name'] in keep_list: + if info["name"] in keep_list: keep[fn] = info - for dep in info['depends']: + for dep in info["depends"]: dep = dep.split()[0] if dep not in keep_list: missing_in_allowlist.add(dep) @@ -180,8 +183,8 @@ def main(): print(">>> missing <<<") print(missing_in_allowlist) - with open(join(dirname(__file__), 'index2.json'), 'w') as fh: - fh.write(json.dumps(keep, indent=2, sort_keys=True, separators=(',', ': '))) + with open(join(dirname(__file__), "index2.json"), "w") as fh: + fh.write(json.dumps(keep, indent=2, sort_keys=True, separators=(",", ": "))) if __name__ == "__main__": diff --git a/tests/data/build-index4-json.py b/tests/data/build-index4-json.py index 99bc7626641..956e43b3847 100644 --- a/tests/data/build-index4-json.py +++ b/tests/data/build-index4-json.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json from os.path import abspath, dirname, join from pprint import pprint @@ -9,11 +8,18 @@ DATA_DIR = abspath(join(dirname(__file__), "repodata")) + def save_data_source(url, name): raw_repodata_str = fetch_repodata_remote_request(url, None, None) json.loads(raw_repodata_str) - with open(join(DATA_DIR, name + ".json"), 'w') as fh: - json.dump(json.loads(raw_repodata_str), fh, indent=2, sort_keys=True, separators=(',', ': ')) + with open(join(DATA_DIR, name + ".json"), "w") as fh: + json.dump( + json.loads(raw_repodata_str), + fh, + indent=2, + sort_keys=True, + separators=(",", ": "), + ) def read_data_source(name): @@ -38,15 +44,15 @@ def main(): r0json = read_data_source("free_linux-64") keep_list = ( - 'gcc', - 'ipython-notebook', + "gcc", + "ipython-notebook", ) _keep = {} missing_in_allowlist = set() - for fn, info in r0json['packages'].items(): - if info['name'] in keep_list: + for fn, info in r0json["packages"].items(): + if info["name"] in keep_list: _keep[fn] = info - for dep in info['depends']: + for dep in info["depends"]: dep = dep.split()[0] if dep not in keep_list: missing_in_allowlist.add(dep) @@ -55,305 +61,289 @@ def main(): # pprint(missing_in_allowlist) keep.update(_keep) - r1json = read_data_source("main_linux-64") keep_list = ( - 'asn1crypto', - 'astroid', - 'backports', - 'backports_abc', - 'bkcharts', - 'bokeh', - 'boto3', - 'botocore', - 'certifi', - 'cffi', - 'chest', - 'click', - 'cloog', - 'cloudpickle', - 'colorama', - 'conda-env', - 'cryptography', - 'dask', - 'dateutil', - 'decorator', - 'dill', - 'distribute', - 'distributed', - 'docutils', - 'enum34', - 'flask', - 'funcsigs', - 'futures', - 'get_terminal_size', - 'gevent', - 'gevent-websocket', - 'gmp', - 'greenlet', - 'heapdict', - 'idna', - 'ipaddress', - 'ipython', - 'ipython_genutils', - 'isl', - 'itsdangerous', - 'jedi', - 'jinja2', - 'jmespath', - 'lazy-object-proxy', - 'libevent', - 'libffi', - 'libgcc', - 'libgfortran', - 'libsodium', - 'llvm', - 'llvmlite', - 'llvmmath', - 'llvmpy', - 'locket', - 'logilab-common', - 'lz4', - 'markupsafe', - 'meta', - 'mkl', - 'mpc', - 'mpfr', - 'msgpack-python', - 'needs-spiffy-test-app', - 'nomkl', - 'nose', - 'numpy', - 'openblas', - 
'openssl', - 'ordereddict', - 'packaging', - 'pandas', - 'partd', - 'path.py', - 'pathlib2', - 'pexpect', - 'pickleshare', - 'pip', - 'prompt_toolkit', - 'psutil', - 'ptyprocess', - 'pyasn1', - 'pycosat', - 'pycparser', - 'pygments', - 'pyopenssl', - 'pyparsing', - 'python', - 'python-dateutil', - 'pytz', - 'pyyaml', - 'pyzmq', - 'readline', - 'redis', - 'redis-py', - 'requests', - 'ruamel_yaml', - 's3fs', - 's3transfer', - 'scandir', - 'scipy', - 'setuptools', - 'simplegeneric', - 'singledispatch', - 'six', - 'sortedcollections', - 'sortedcontainers', - 'spiffy-test-app', - 'sqlite', - 'ssl_match_hostname', - 'system', - 'tblib', - 'tk', - 'toolz', - 'tornado', - 'traitlets', - 'ujson', - 'uses-spiffy-test-app', - 'util-linux', - 'wcwidth', - 'werkzeug', - 'wheel', - 'wrapt', - 'xz', - 'yaml', - 'zeromq', - 'zict', - 'zlib', - - 'intel-openmp', - 'libgcc-ng', - 'libedit', - 'urllib3', - 'backports.shutil_get_terminal_size', - 'libgfortran-ng', - 'ncurses', - 'matplotlib', - 'ca-certificates', - 'chardet', - 'dask-core', - 'libstdcxx-ng', - 'backports.functools_lru_cache', - 'cycler', - 'freetype', - 'icu', - 'subprocess32', - 'pysocks', - 'pyqt', - 'libpng', - 'functools32', - 'qt', - 'sip', - 'dbus', - 'jpeg', - 'glib', - 'gst-plugins-base', - 'libxcb', - 'fontconfig', - 'expat', - 'pcre', - 'gstreamer', - 'libxml2', - - 'parso', - 'openblas-devel', - 'libopenblas', - - 'conda-build', - 'pkginfo', - 'glob2', - 'filelock', - 'conda-verify', - 'contextlib2', - 'patchelf', - 'beautifulsoup4', - - 'conda', - 'cytoolz', - - 'mkl_fft', - 'mkl_random', - 'kiwisolver', - 'numpydoc', - 'blas', - 'libuuid', - 'numpy-base', - 'backcall', - 'sphinx', - 'alabaster', - 'sphinxcontrib-websupport', - 'imagesize', - 'typing', - 'babel', - 'snowballstemmer', - 'sphinxcontrib', - - 'rpy2', - - 'notebook', - 'ipykernel', - 'jupyter_client', - 'jupyter_core', - 'nbconvert', - 'nbformat', - 'send2trash', - 'terminado', - 'bleach', - 'entrypoints', - 'jsonschema', - 'mistune', - 'pandoc', - 'pandocfilters', - 'testpath', - 'html5lib', - 'configparser', - - 'bwidget', - 'bzip2', - 'cairo', - 'configparser', - 'curl', - 'gcc_linux-64', - 'gfortran_linux-64', - 'gsl', - 'gxx_linux-64', - 'html5lib', - 'krb5', - 'libcurl', - 'libssh2', - 'libtiff', - 'pango', - 'tktable', - 'binutils_linux-64', - 'fribidi', - 'gcc_impl_linux-64', - 'gfortran_impl_linux-64', - 'graphite2', - 'gxx_impl_linux-64', - 'harfbuzz', - 'pixman', - 'webencodings', - 'binutils_impl_linux-64', - 'freeglut', - 'ipython-notebook', - 'jupyter', - 'libxslt', - 'qtconsole', - 'ipywidgets', - 'jupyter_console', - 'widgetsnbextension', - - 'graphviz', - 'libtool', - - 'python-graphviz', - - 'ibis-framework', - 'impyla', - 'thriftpy', - 'thrift', - 'psycopg2', - 'sqlalchemy', - 'multipledispatch', - 'ply', - 'libpq', - 'bitarray', - - 'cryptography-vectors', - 'future', - 'package_has_been_revoked', - 'prometheus_client', - 'typed-ast', - 'twisted', - - 'appdirs', - 'automat', - 'constantly', - 'hyperlink', - 'incremental', - 'service_identity', - 'zope.interface', - 'zope', - 'pyasn1-modules', - 'attrs', - 'pympler', - 'hypothesis', - 'coverage', - - 'make', - + "asn1crypto", + "astroid", + "backports", + "backports_abc", + "bkcharts", + "bokeh", + "boto3", + "botocore", + "certifi", + "cffi", + "chest", + "click", + "cloog", + "cloudpickle", + "colorama", + "conda-env", + "cryptography", + "dask", + "dateutil", + "decorator", + "dill", + "distribute", + "distributed", + "docutils", + "enum34", + "flask", + "funcsigs", + "futures", + 
"get_terminal_size", + "gevent", + "gevent-websocket", + "gmp", + "greenlet", + "heapdict", + "idna", + "ipaddress", + "ipython", + "ipython_genutils", + "isl", + "itsdangerous", + "jedi", + "jinja2", + "jmespath", + "lazy-object-proxy", + "libevent", + "libffi", + "libgcc", + "libgfortran", + "libsodium", + "llvm", + "llvmlite", + "llvmmath", + "llvmpy", + "locket", + "logilab-common", + "lz4", + "markupsafe", + "meta", + "mkl", + "mpc", + "mpfr", + "msgpack-python", + "needs-spiffy-test-app", + "nomkl", + "nose", + "numpy", + "openblas", + "openssl", + "ordereddict", + "packaging", + "pandas", + "partd", + "path.py", + "pathlib2", + "pexpect", + "pickleshare", + "pip", + "prompt_toolkit", + "psutil", + "ptyprocess", + "pyasn1", + "pycosat", + "pycparser", + "pygments", + "pyopenssl", + "pyparsing", + "python", + "python-dateutil", + "pytz", + "pyyaml", + "pyzmq", + "readline", + "redis", + "redis-py", + "requests", + "ruamel_yaml", + "s3fs", + "s3transfer", + "scandir", + "scipy", + "setuptools", + "simplegeneric", + "singledispatch", + "six", + "sortedcollections", + "sortedcontainers", + "spiffy-test-app", + "sqlite", + "ssl_match_hostname", + "system", + "tblib", + "tk", + "toolz", + "tornado", + "traitlets", + "ujson", + "uses-spiffy-test-app", + "util-linux", + "wcwidth", + "werkzeug", + "wheel", + "wrapt", + "xz", + "yaml", + "zeromq", + "zict", + "zlib", + "intel-openmp", + "libgcc-ng", + "libedit", + "urllib3", + "backports.shutil_get_terminal_size", + "libgfortran-ng", + "ncurses", + "matplotlib", + "ca-certificates", + "chardet", + "dask-core", + "libstdcxx-ng", + "backports.functools_lru_cache", + "cycler", + "freetype", + "icu", + "subprocess32", + "pysocks", + "pyqt", + "libpng", + "functools32", + "qt", + "sip", + "dbus", + "jpeg", + "glib", + "gst-plugins-base", + "libxcb", + "fontconfig", + "expat", + "pcre", + "gstreamer", + "libxml2", + "parso", + "openblas-devel", + "libopenblas", + "conda-build", + "pkginfo", + "glob2", + "filelock", + "conda-verify", + "contextlib2", + "patchelf", + "beautifulsoup4", + "conda", + "cytoolz", + "mkl_fft", + "mkl_random", + "kiwisolver", + "numpydoc", + "blas", + "libuuid", + "numpy-base", + "backcall", + "sphinx", + "alabaster", + "sphinxcontrib-websupport", + "imagesize", + "typing", + "babel", + "snowballstemmer", + "sphinxcontrib", + "rpy2", + "notebook", + "ipykernel", + "jupyter_client", + "jupyter_core", + "nbconvert", + "nbformat", + "send2trash", + "terminado", + "bleach", + "entrypoints", + "jsonschema", + "mistune", + "pandoc", + "pandocfilters", + "testpath", + "html5lib", + "configparser", + "bwidget", + "bzip2", + "cairo", + "configparser", + "curl", + "gcc_linux-64", + "gfortran_linux-64", + "gsl", + "gxx_linux-64", + "html5lib", + "krb5", + "libcurl", + "libssh2", + "libtiff", + "pango", + "tktable", + "binutils_linux-64", + "fribidi", + "gcc_impl_linux-64", + "gfortran_impl_linux-64", + "graphite2", + "gxx_impl_linux-64", + "harfbuzz", + "pixman", + "webencodings", + "binutils_impl_linux-64", + "freeglut", + "ipython-notebook", + "jupyter", + "libxslt", + "qtconsole", + "ipywidgets", + "jupyter_console", + "widgetsnbextension", + "graphviz", + "libtool", + "python-graphviz", + "ibis-framework", + "impyla", + "thriftpy", + "thrift", + "psycopg2", + "sqlalchemy", + "multipledispatch", + "ply", + "libpq", + "bitarray", + "cryptography-vectors", + "future", + "package_has_been_revoked", + "prometheus_client", + "typed-ast", + "twisted", + "appdirs", + "automat", + "constantly", + "hyperlink", + "incremental", + 
"service_identity", + "zope.interface", + "zope", + "pyasn1-modules", + "attrs", + "pympler", + "hypothesis", + "coverage", + "make", ) _keep = {} missing_in_allowlist = set() - for fn, info in r1json['packages'].items(): - if info['name'] in keep_list: + for fn, info in r1json["packages"].items(): + if info["name"] in keep_list: _keep[fn] = info - for dep in info['depends']: + for dep in info["depends"]: dep = dep.split()[0] if dep not in keep_list: missing_in_allowlist.add(dep) @@ -362,222 +352,217 @@ def main(): pprint(missing_in_allowlist) # patch 'conda-env' to include an namespace for the problem cases - conda_env_dicts = tuple(info for info in _keep.values() if info['name'] == 'conda-env') + conda_env_dicts = tuple( + info for info in _keep.values() if info["name"] == "conda-env" + ) for info in conda_env_dicts: - if not any(d.startswith('python') for d in info['depends']): - info['namespace'] = 'python' + if not any(d.startswith("python") for d in info["depends"]): + info["namespace"] = "python" keep.update(_keep) - r2json = read_data_source("conda-test_noarch") - keep.update(r2json['packages']) + keep.update(r2json["packages"]) r3json = read_data_source("main_noarch") - keep.update(r3json['packages']) + keep.update(r3json["packages"]) r4json = read_data_source("r_linux-64") _keep = {} missing_in_allowlist = set() keep_list = ( - 'mro-base', - 'r-base', - '_r-mutex', - - 'nlopt', # ignore this one - - 'r-essentials', - 'r', - 'r-broom', - 'r-caret', - 'r-data.table', - 'r-dbi', - 'r-dplyr', - 'r-forcats', - 'r-formatr', - 'r-ggplot2', - 'r-glmnet', - 'r-haven', - 'r-hms', - 'r-httr', - 'r-irkernel', - 'r-jsonlite', - 'r-lubridate', - 'r-magrittr', - 'r-modelr', - 'r-plyr', - 'r-purrr', - 'r-quantmod', - 'r-randomforest', - 'r-rbokeh', - 'r-readr', - 'r-readxl', - 'r-recommended', - 'r-reshape2', - 'r-rmarkdown', - 'r-rvest', - 'r-shiny', - 'r-stringr', - 'r-tibble', - 'r-tidyr', - 'r-tidyverse', - 'r-xml2', - 'r-zoo', - - 'mro-basics', - 'r-assertthat', - 'r-base64enc', - 'r-bh', - 'r-bindrcpp', - 'r-boot', - 'r-bradleyterry2', - 'r-car', - 'r-catools', - 'r-cellranger', - 'r-chron', - 'r-class', - 'r-cli', - 'r-cluster', - 'r-codetools', - 'r-crayon', - 'r-curl', - 'r-dbplyr', - 'r-digest', - 'r-evaluate', - 'r-foreach', - 'r-foreign', - 'r-gistr', - 'r-glue', - 'r-gtable', - 'r-hexbin', - 'r-htmltools', - 'r-htmlwidgets', - 'r-httpuv', - 'r-irdisplay', - 'r-kernsmooth', - 'r-knitr', - 'r-lattice', - 'r-lazyeval', - 'r-maps', - 'r-mass', - 'r-matrix', - 'r-memoise', - 'r-mgcv', - 'r-mime', - 'r-modelmetrics', - 'r-nlme', - 'r-nnet', - 'r-openssl', - 'r-pbdzmq', - 'r-pillar', - 'r-pkgconfig', - 'r-plogr', - 'r-proto', - 'r-pryr', - 'r-psych', - 'r-r6', - 'r-rcpp', - 'r-recipes', - 'r-repr', - 'r-reprex', - 'r-rjsonio', - 'r-rlang', - 'r-rpart', - 'r-rprojroot', - 'r-rstudioapi', - 'r-rzmq', - 'r-scales', - 'r-selectr', - 'r-sourcetools', - 'r-spatial', - 'r-stringi', - 'r-survival', - 'r-tidyselect', - 'r-ttr', - 'r-uuid', - 'r-withr', - 'r-xtable', - 'r-xts', - 'r-yaml', - - 'r-backports', - 'r-bindr', - 'r-bitops', - 'r-brglm', - 'r-callr', - 'r-checkpoint', - 'r-clipr', - 'r-ddalpha', - 'r-deployrrserve', - 'r-dichromat', - 'r-dimred', - 'r-doparallel', - 'r-gower', - 'r-gtools', - 'r-highr', - 'r-ipred', - 'r-iterators', - 'r-labeling', - 'r-lme4', - 'r-markdown', - 'r-microsoftr', - 'r-mnormt', - 'r-munsell', - 'r-pbkrtest', - 'r-png', - 'r-quantreg', - 'r-qvcalc', - 'r-rcolorbrewer', - 'r-rcpproll', - 'r-rematch', - 'r-revoioq', - 'r-revomods', - 'r-revoutilsmath', - 
'r-runit', - 'r-timedate', - 'r-utf8', - 'r-viridislite', - 'r-whisker', - 'r-colorspace', - 'r-drr', - 'r-matrixmodels', - 'r-minqa', - 'r-nloptr', - 'r-prodlim', - 'r-profilemodel', - 'r-rcppeigen', - 'r-robustbase', - 'r-sfsmisc', - 'r-sparsem', - 'r-lava', - 'r-deoptimr', - 'r-kernlab', - 'r-cvst', - 'r-numderiv', - - 'r-abind', - 'r-cardata', - 'r-debugme', - 'r-geometry', - 'r-later', - 'r-maptools', - 'r-processx', - 'r-promises', - 'r-rio', - 'r-squarem', - 'r-testthat', - 'r-magic', - 'r-sp', - 'r-openxlsx', - 'r-praise', - 'r-zip', - + "mro-base", + "r-base", + "_r-mutex", + "nlopt", # ignore this one + "r-essentials", + "r", + "r-broom", + "r-caret", + "r-data.table", + "r-dbi", + "r-dplyr", + "r-forcats", + "r-formatr", + "r-ggplot2", + "r-glmnet", + "r-haven", + "r-hms", + "r-httr", + "r-irkernel", + "r-jsonlite", + "r-lubridate", + "r-magrittr", + "r-modelr", + "r-plyr", + "r-purrr", + "r-quantmod", + "r-randomforest", + "r-rbokeh", + "r-readr", + "r-readxl", + "r-recommended", + "r-reshape2", + "r-rmarkdown", + "r-rvest", + "r-shiny", + "r-stringr", + "r-tibble", + "r-tidyr", + "r-tidyverse", + "r-xml2", + "r-zoo", + "mro-basics", + "r-assertthat", + "r-base64enc", + "r-bh", + "r-bindrcpp", + "r-boot", + "r-bradleyterry2", + "r-car", + "r-catools", + "r-cellranger", + "r-chron", + "r-class", + "r-cli", + "r-cluster", + "r-codetools", + "r-crayon", + "r-curl", + "r-dbplyr", + "r-digest", + "r-evaluate", + "r-foreach", + "r-foreign", + "r-gistr", + "r-glue", + "r-gtable", + "r-hexbin", + "r-htmltools", + "r-htmlwidgets", + "r-httpuv", + "r-irdisplay", + "r-kernsmooth", + "r-knitr", + "r-lattice", + "r-lazyeval", + "r-maps", + "r-mass", + "r-matrix", + "r-memoise", + "r-mgcv", + "r-mime", + "r-modelmetrics", + "r-nlme", + "r-nnet", + "r-openssl", + "r-pbdzmq", + "r-pillar", + "r-pkgconfig", + "r-plogr", + "r-proto", + "r-pryr", + "r-psych", + "r-r6", + "r-rcpp", + "r-recipes", + "r-repr", + "r-reprex", + "r-rjsonio", + "r-rlang", + "r-rpart", + "r-rprojroot", + "r-rstudioapi", + "r-rzmq", + "r-scales", + "r-selectr", + "r-sourcetools", + "r-spatial", + "r-stringi", + "r-survival", + "r-tidyselect", + "r-ttr", + "r-uuid", + "r-withr", + "r-xtable", + "r-xts", + "r-yaml", + "r-backports", + "r-bindr", + "r-bitops", + "r-brglm", + "r-callr", + "r-checkpoint", + "r-clipr", + "r-ddalpha", + "r-deployrrserve", + "r-dichromat", + "r-dimred", + "r-doparallel", + "r-gower", + "r-gtools", + "r-highr", + "r-ipred", + "r-iterators", + "r-labeling", + "r-lme4", + "r-markdown", + "r-microsoftr", + "r-mnormt", + "r-munsell", + "r-pbkrtest", + "r-png", + "r-quantreg", + "r-qvcalc", + "r-rcolorbrewer", + "r-rcpproll", + "r-rematch", + "r-revoioq", + "r-revomods", + "r-revoutilsmath", + "r-runit", + "r-timedate", + "r-utf8", + "r-viridislite", + "r-whisker", + "r-colorspace", + "r-drr", + "r-matrixmodels", + "r-minqa", + "r-nloptr", + "r-prodlim", + "r-profilemodel", + "r-rcppeigen", + "r-robustbase", + "r-sfsmisc", + "r-sparsem", + "r-lava", + "r-deoptimr", + "r-kernlab", + "r-cvst", + "r-numderiv", + "r-abind", + "r-cardata", + "r-debugme", + "r-geometry", + "r-later", + "r-maptools", + "r-processx", + "r-promises", + "r-rio", + "r-squarem", + "r-testthat", + "r-magic", + "r-sp", + "r-openxlsx", + "r-praise", + "r-zip", ) all_package_names = {info["name"] for info in keep.values()} for fn, info in r4json["packages"].items(): if info["name"] in keep_list: _keep[fn] = info - for dep in info['depends']: + for dep in info["depends"]: dep = dep.split()[0] if dep not in keep_list and dep not in 
all_package_names: missing_in_allowlist.add(dep) @@ -586,15 +571,13 @@ def main(): pprint(missing_in_allowlist) # patch 'r' to be in global namespace - r_info_dicts = tuple(info for info in _keep.values() if info['name'] == 'r') + r_info_dicts = tuple(info for info in _keep.values() if info["name"] == "r") for info in r_info_dicts: - if any(dep.startswith("r-base") for dep in info['depends']): - info['namespace'] = "global" + if any(dep.startswith("r-base") for dep in info["depends"]): + info["namespace"] = "global" keep.update(_keep) - - r5json = read_data_source("conda-forge_linux-64") _keep = {} missing_in_allowlist = set() @@ -603,7 +586,7 @@ def main(): for fn, info in r5json["packages"].items(): if info["name"] in keep_list: _keep[fn] = info - for dep in info['depends']: + for dep in info["depends"]: dep = dep.split()[0] if dep not in keep_list and dep not in all_package_names: missing_in_allowlist.add(dep) @@ -612,75 +595,71 @@ def main(): pprint(missing_in_allowlist) keep.update(_keep) - r6json = read_data_source("bioconda_linux-64") _keep = {} missing_in_allowlist = set() keep_list = ( - 'perl-graphviz', - 'perl-file-which', - 'perl-ipc-run', - 'perl-libwww-perl', - 'perl-parse-recdescent', - 'perl-test-pod', - 'perl-threaded', - 'perl-xml-twig', - 'perl-xml-xpath', - 'perl-app-cpanminus', - 'perl-encode-locale', - 'perl-file-listing', - 'perl-html-entities-numbered', - 'perl-html-formatter', - 'perl-html-parser', - 'perl-html-tidy', - 'perl-html-tree', - 'perl-http-cookies', - 'perl-http-daemon', - 'perl-http-date', - 'perl-http-message', - 'perl-http-negotiate', - 'perl-io-tty', - 'perl-lwp-mediatypes', - 'perl-net-http', - 'perl-ntlm', - 'perl-tie-ixhash', - 'perl-uri', - 'perl-www-robotrules', - 'perl-xml-parser', - 'perl-xml-xpathengine', - 'perl-digest-hmac', - 'perl-font-afm', - 'perl-html-tagset', - 'perl-io-html', - 'perl-io-socket-ssl', - 'perl-scalar-list-utils', - 'tidyp', - 'perl-net-ssleay', - 'perl-mime-base64', - 'perl-xsloader', - 'perl-test-more', - - 'perl-carp', - 'perl-encode', - 'perl-exporter', - 'perl-getopt-long', - 'perl-lib', - 'perl-pod-usage', - 'perl-time-hires', - - 'perl-pod-escapes', - 'perl-extutils-makemaker', - 'perl-test', - 'perl-parent', - 'perl-data-dumper', - 'perl-test-harness', - + "perl-graphviz", + "perl-file-which", + "perl-ipc-run", + "perl-libwww-perl", + "perl-parse-recdescent", + "perl-test-pod", + "perl-threaded", + "perl-xml-twig", + "perl-xml-xpath", + "perl-app-cpanminus", + "perl-encode-locale", + "perl-file-listing", + "perl-html-entities-numbered", + "perl-html-formatter", + "perl-html-parser", + "perl-html-tidy", + "perl-html-tree", + "perl-http-cookies", + "perl-http-daemon", + "perl-http-date", + "perl-http-message", + "perl-http-negotiate", + "perl-io-tty", + "perl-lwp-mediatypes", + "perl-net-http", + "perl-ntlm", + "perl-tie-ixhash", + "perl-uri", + "perl-www-robotrules", + "perl-xml-parser", + "perl-xml-xpathengine", + "perl-digest-hmac", + "perl-font-afm", + "perl-html-tagset", + "perl-io-html", + "perl-io-socket-ssl", + "perl-scalar-list-utils", + "tidyp", + "perl-net-ssleay", + "perl-mime-base64", + "perl-xsloader", + "perl-test-more", + "perl-carp", + "perl-encode", + "perl-exporter", + "perl-getopt-long", + "perl-lib", + "perl-pod-usage", + "perl-time-hires", + "perl-pod-escapes", + "perl-extutils-makemaker", + "perl-test", + "perl-parent", + "perl-data-dumper", + "perl-test-harness", ) all_package_names = {info["name"] for info in keep.values()} for fn, info in r6json["packages"].items(): if info["name"] in 
keep_list: _keep[fn] = info - for dep in info['depends']: + for dep in info["depends"]: dep = dep.split()[0] if dep not in keep_list and dep not in all_package_names: missing_in_allowlist.add(dep) @@ -689,14 +668,15 @@ def main(): pprint(missing_in_allowlist) # patch 'perl-*' to include an explicit dependency on perl, as from the 'perl-threaded' package - perl_info_dicts = tuple(info for info in _keep.values() if info['name'].startswith('perl-')) + perl_info_dicts = tuple( + info for info in _keep.values() if info["name"].startswith("perl-") + ) for info in perl_info_dicts: - if not any(dep.startswith("perl ") for dep in info['depends']): - info['depends'].append('perl 5.22.0*') + if not any(dep.startswith("perl ") for dep in info["depends"]): + info["depends"].append("perl 5.22.0*") keep.update(_keep) - additional_records = { "python-3.6.2-hda45abc_19.tar.bz2": { # later hash, earlier timestamp "build": "hda45abc_19", @@ -711,7 +691,7 @@ def main(): "sqlite >=3.20.1,<4.0a0", "tk 8.6.*", "xz >=5.2.3,<6.0a0", - "zlib >=1.2.11,<1.3.0a0" + "zlib >=1.2.11,<1.3.0a0", ], "license": "PSF", "md5": "bdc6db1adbe7268e3ecbae13ec02066a", @@ -720,16 +700,12 @@ def main(): "size": 28300090, "subdir": "linux-64", "timestamp": 1507190714033, - "version": "3.6.2" + "version": "3.6.2", }, "sqlite-3.20.1-haaaaaaa_4.tar.bz2": { # deep cyclical dependency "build": "haaaaaaa_4", "build_number": 4, - "depends": [ - "libedit", - "libgcc-ng >=7.2.0", - "jinja2 2.9.6" - ], + "depends": ["libedit", "libgcc-ng >=7.2.0", "jinja2 2.9.6"], "license": "Public-Domain (http://www.sqlite.org/copyright.html)", "md5": "deadbeefdd677bc3ed98ddd4deadbeef", "name": "sqlite", @@ -737,7 +713,7 @@ def main(): "size": 1540584, "subdir": "linux-64", "timestamp": 1505666646842, - "version": "3.20.1" + "version": "3.20.1", }, "python-digest-1.1.1-py2_0.tar.bz2": { "build": "py2_0", @@ -872,27 +848,32 @@ def main(): keep.update(additional_records) - python_362_records = tuple(info for info in _keep.values() - if info['name'] == "python" and info["version"] == "3.6.2") + python_362_records = tuple( + info + for info in _keep.values() + if info["name"] == "python" and info["version"] == "3.6.2" + ) assert not any(info["build_number"] > 19 for info in python_362_records) all_package_names = {info["name"] for info in keep.values()} ignore_names = { - 'nlopt', + "nlopt", } missing = set() for info in keep.values(): - for line in info['depends']: - package_name = line.split(' ')[0] - if package_name not in all_package_names and package_name not in ignore_names: + for line in info["depends"]: + package_name = line.split(" ")[0] + if ( + package_name not in all_package_names + and package_name not in ignore_names + ): missing.add(package_name) if missing: print(">>> missing final <<<") pprint(missing) - - with open(join(dirname(__file__), 'index4.json'), 'w') as fh: - fh.write(json.dumps(keep, indent=2, sort_keys=True, separators=(',', ': '))) + with open(join(dirname(__file__), "index4.json"), "w") as fh: + fh.write(json.dumps(keep, indent=2, sort_keys=True, separators=(",", ": "))) if __name__ == "__main__": diff --git a/tests/data/build-index5-json.py b/tests/data/build-index5-json.py index fac223228cd..708aff7fbd7 100644 --- a/tests/data/build-index5-json.py +++ b/tests/data/build-index5-json.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json from os.path import abspath, dirname, join from pprint import pprint @@ -9,11 +8,18 @@ DATA_DIR = abspath(join(dirname(__file__), "repodata")) + 
def save_data_source(url, name): raw_repodata_str = fetch_repodata_remote_request(url, None, None) json.loads(raw_repodata_str) - with open(join(DATA_DIR, name + ".json"), 'w') as fh: - json.dump(json.loads(raw_repodata_str), fh, indent=2, sort_keys=True, separators=(',', ': ')) + with open(join(DATA_DIR, name + ".json"), "w") as fh: + json.dump( + json.loads(raw_repodata_str), + fh, + indent=2, + sort_keys=True, + separators=(",", ": "), + ) def read_data_source(name): @@ -25,90 +31,82 @@ def main(): r1json = read_data_source("main_win-64") packages = {} - packages.update(r1json['packages']) + packages.update(r1json["packages"]) keep_list = ( - - 'python', - 'vs2008_runtime', - 'vs2015_runtime', - 'vc', - - 'requests', - 'urllib3', - 'idna', - 'chardet', - 'certifi', - 'pyopenssl', - 'cryptography', - 'ipaddress', - 'pysocks', - 'win_inet_pton', - 'openssl', - 'cffi', - 'enum34', - 'six', - 'asn1crypto', - 'pycparser', - 'ca-certificates', - - 'pip', - 'colorama', - 'progress', - 'html5lib', - 'wheel', - 'distlib', - 'packaging', - 'lockfile', - 'webencodings', - 'cachecontrol', - 'pyparsing', - 'msgpack-python', - - 'conda', - 'menuinst', - 'futures', - 'ruamel_yaml', - 'pycosat', - 'conda-env', - 'yaml', - 'pywin32', - 'cytoolz', - 'toolz', - - 'conda-build', - 'pyyaml', - 'jinja2', - 'pkginfo', - 'contextlib2', - 'beautifulsoup4', - 'conda-verify', - 'filelock', - 'glob2', - 'psutil', - 'scandir', - 'setuptools', - 'markupsafe', - 'wincertstore', - - 'click', - 'future', - 'backports.functools_lru_cache', - 'cryptography-vectors', - 'backports', - - 'colour', - 'affine', - + "python", + "vs2008_runtime", + "vs2015_runtime", + "vc", + "requests", + "urllib3", + "idna", + "chardet", + "certifi", + "pyopenssl", + "cryptography", + "ipaddress", + "pysocks", + "win_inet_pton", + "openssl", + "cffi", + "enum34", + "six", + "asn1crypto", + "pycparser", + "ca-certificates", + "pip", + "colorama", + "progress", + "html5lib", + "wheel", + "distlib", + "packaging", + "lockfile", + "webencodings", + "cachecontrol", + "pyparsing", + "msgpack-python", + "conda", + "menuinst", + "futures", + "ruamel_yaml", + "pycosat", + "conda-env", + "yaml", + "pywin32", + "cytoolz", + "toolz", + "conda-build", + "pyyaml", + "jinja2", + "pkginfo", + "contextlib2", + "beautifulsoup4", + "conda-verify", + "filelock", + "glob2", + "psutil", + "scandir", + "setuptools", + "markupsafe", + "wincertstore", + "click", + "future", + "backports.functools_lru_cache", + "cryptography-vectors", + "backports", + "colour", + "affine", ) keep = {} missing_in_allowlist = set() for fn, info in packages.items(): - if info['name'] in keep_list: + if info["name"] in keep_list: keep[fn] = info - for dep in info['depends']: + for dep in info["depends"]: dep = dep.split()[0] if dep not in keep_list: missing_in_allowlist.add(dep) @@ -118,10 +116,10 @@ def main(): pprint(missing_in_allowlist) r2json = read_data_source("conda-test_noarch") - keep.update(r2json['packages']) + keep.update(r2json["packages"]) r3json = read_data_source("main_noarch") - keep.update(r3json['packages']) + keep.update(r3json["packages"]) # additional_records = { # "python-3.6.2-hda45abc_19.tar.bz2": { # later hash, earlier timestamp @@ -152,8 +150,8 @@ def main(): # # keep.update(additional_records) - with open(join(dirname(__file__), 'index5.json'), 'w') as fh: - fh.write(json.dumps(keep, indent=2, sort_keys=True, separators=(',', ': '))) + with open(join(dirname(__file__), "index5.json"), "w") as fh: + fh.write(json.dumps(keep, indent=2, sort_keys=True, 
separators=(",", ": "))) if __name__ == "__main__": diff --git a/tests/data/env_metadata/__init__.py b/tests/data/env_metadata/__init__.py index 0211ee5bcae..00bbb740876 100644 --- a/tests/data/env_metadata/__init__.py +++ b/tests/data/env_metadata/__init__.py @@ -86,15 +86,19 @@ # Test environment installed using either `pip install ` or # `python setup.py install` -PATH_TEST_ENV_1 = os.path.join(HERE, 'envpy27osx') -PATH_TEST_ENV_2 = os.path.join(HERE, 'envpy37osx_whl') -PATH_TEST_ENV_3 = os.path.join(HERE, 'envpy37win') -PATH_TEST_ENV_4 = os.path.join(HERE, 'envpy27win_whl') - -METADATA_241_PATH = os.path.join(HERE, 'pep241', 'PKG-INFO') -METADATA_314_PATH = os.path.join(HERE, 'pep314', 'PKG-INFO') -METADATA_345_PATH = os.path.join(HERE, 'pep345', 'PKG-INFO') -METADATA_566_PATH = os.path.join(HERE, 'pep566', 'PKG-INFO') - -METADATA_VERSION_PATHS = (METADATA_241_PATH, METADATA_314_PATH, - METADATA_345_PATH, METADATA_566_PATH) +PATH_TEST_ENV_1 = os.path.join(HERE, "envpy27osx") +PATH_TEST_ENV_2 = os.path.join(HERE, "envpy37osx_whl") +PATH_TEST_ENV_3 = os.path.join(HERE, "envpy37win") +PATH_TEST_ENV_4 = os.path.join(HERE, "envpy27win_whl") + +METADATA_241_PATH = os.path.join(HERE, "pep241", "PKG-INFO") +METADATA_314_PATH = os.path.join(HERE, "pep314", "PKG-INFO") +METADATA_345_PATH = os.path.join(HERE, "pep345", "PKG-INFO") +METADATA_566_PATH = os.path.join(HERE, "pep566", "PKG-INFO") + +METADATA_VERSION_PATHS = ( + METADATA_241_PATH, + METADATA_314_PATH, + METADATA_345_PATH, + METADATA_566_PATH, +) diff --git a/tests/fixtures_jlap.py b/tests/fixtures_jlap.py index 50269408356..4fef19663a3 100644 --- a/tests/fixtures_jlap.py +++ b/tests/fixtures_jlap.py @@ -101,7 +101,9 @@ def prepare_socket() -> socket.socket: def _package_server(cleanup=True, base: Path | None = None): socket = prepare_socket() context = multiprocessing.get_context("spawn") - process = context.Process(target=make_server_with_socket, args=(socket, base), daemon=True) + process = context.Process( + target=make_server_with_socket, args=(socket, base), daemon=True + ) process.start() yield socket process.kill() @@ -127,6 +129,5 @@ def package_server(package_repository_base): if __name__ == "__main__": - print(run_on_random_port()) time.sleep(60) diff --git a/tests/gateways/disk/test_create.py b/tests/gateways/disk/test_create.py index ba75bc51135..c255692ad69 100644 --- a/tests/gateways/disk/test_create.py +++ b/tests/gateways/disk/test_create.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger log = getLogger(__name__) diff --git a/tests/gateways/disk/test_delete.py b/tests/gateways/disk/test_delete.py index a8cfc417299..74192658be6 100644 --- a/tests/gateways/disk/test_delete.py +++ b/tests/gateways/disk/test_delete.py @@ -1,20 +1,19 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - -from errno import ENOENT import os +from errno import ENOENT from os.path import isdir, isfile, islink, join, lexists import pytest from conda.common.compat import on_win -from conda.gateways.disk.create import create_link, mkdir_p, TemporaryDirectory +from conda.gateways.disk.create import TemporaryDirectory, create_link, mkdir_p from conda.gateways.disk.delete import move_to_trash, rm_rf from conda.gateways.disk.link import islink, symlink from conda.gateways.disk.test import softlink_supported from conda.gateways.disk.update import touch from conda.models.enums import LinkType + from .test_permissions import _make_read_only, _try_open, 
tempdir @@ -26,7 +25,7 @@ def _write_file(path, content): def test_remove_file(): with tempdir() as td: - test_path = join(td, 'test_path') + test_path = join(td, "test_path") touch(test_path) assert isfile(test_path) _try_open(test_path) @@ -38,7 +37,7 @@ def test_remove_file(): def test_remove_file_to_trash(): with tempdir() as td: - test_path = join(td, 'test_path') + test_path = join(td, "test_path") touch(test_path) assert isfile(test_path) _try_open(test_path) @@ -50,7 +49,7 @@ def test_remove_file_to_trash(): def test_remove_dir(): with tempdir() as td: - test_path = join(td, 'test_path') + test_path = join(td, "test_path") touch(test_path) _try_open(test_path) assert isfile(test_path) @@ -112,14 +111,14 @@ def test_remove_link_to_dir(): def test_rm_rf_does_not_follow_symlinks(): with TemporaryDirectory() as tmp: # make a file in some temp folder - real_file = os.path.join(tmp, 'testfile') - with open(real_file, 'w') as f: - f.write('weee') + real_file = os.path.join(tmp, "testfile") + with open(real_file, "w") as f: + f.write("weee") # make a subfolder - subdir = os.path.join(tmp, 'subfolder') + subdir = os.path.join(tmp, "subfolder") os.makedirs(subdir) # link to the file in the subfolder - link_path = join(subdir, 'file_link') + link_path = join(subdir, "file_link") if not softlink_supported(real_file, tmp) and on_win: pytest.skip("softlink not supported") @@ -133,7 +132,7 @@ def test_rm_rf_does_not_follow_symlinks(): def test_move_to_trash(): with tempdir() as td: - test_path = join(td, 'test_path') + test_path = join(td, "test_path") touch(test_path) _try_open(test_path) assert isdir(td) @@ -144,8 +143,9 @@ def test_move_to_trash(): def test_move_path_to_trash_couldnt(): from conda.gateways.disk.delete import move_path_to_trash + with tempdir() as td: - test_path = join(td, 'test_path') + test_path = join(td, "test_path") touch(test_path) _try_open(test_path) assert isdir(td) @@ -155,8 +155,9 @@ def test_move_path_to_trash_couldnt(): def test_backoff_unlink(): from conda.gateways.disk.delete import backoff_rmdir + with tempdir() as td: - test_path = join(td, 'test_path') + test_path = join(td, "test_path") touch(test_path) _try_open(test_path) assert isdir(td) @@ -166,17 +167,19 @@ def test_backoff_unlink(): def test_backoff_unlink_doesnt_exist(): from conda.gateways.disk.delete import backoff_rmdir + with tempdir() as td: - test_path = join(td, 'test_path') + test_path = join(td, "test_path") touch(test_path) try: - backoff_rmdir(join(test_path, 'some', 'path', 'in', 'utopia')) + backoff_rmdir(join(test_path, "some", "path", "in", "utopia")) except Exception as e: assert e.value.errno == ENOENT def test_try_rmdir_all_empty_doesnt_exist(): from conda.gateways.disk.delete import try_rmdir_all_empty + with tempdir() as td: assert isdir(td) try_rmdir_all_empty(td) diff --git a/tests/gateways/disk/test_link.py b/tests/gateways/disk/test_link.py index b58b1af5ed8..1cd901f0a5c 100644 --- a/tests/gateways/disk/test_link.py +++ b/tests/gateways/disk/test_link.py @@ -1,19 +1,18 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -from logging import getLogger import os -from os.path import join, isdir, lexists, isfile, exists +import uuid +from logging import getLogger +from os.path import exists, isdir, isfile, join, lexists from tempfile import gettempdir from unittest import TestCase -import uuid import pytest from conda.common.compat import on_win from conda.gateways.disk.create import mkdir_p from conda.gateways.disk.delete import rm_rf -from 
conda.gateways.disk.link import link, islink, readlink, symlink +from conda.gateways.disk.link import islink, link, readlink, symlink from conda.gateways.disk.test import softlink_supported from conda.gateways.disk.update import touch @@ -21,7 +20,6 @@ class LinkSymlinkUnlinkIslinkReadlinkTests(TestCase): - def setUp(self): tempdirdir = gettempdir() dirname = str(uuid.uuid4())[:8] @@ -34,8 +32,8 @@ def tearDown(self): assert not lexists(self.test_dir) def test_hard_link(self): - path1_real_file = join(self.test_dir, 'path1_real_file') - path2_second_inode = join(self.test_dir, 'path2_second_inode') + path1_real_file = join(self.test_dir, "path1_real_file") + path2_second_inode = join(self.test_dir, "path2_second_inode") touch(path1_real_file) assert isfile(path1_real_file) assert not islink(path1_real_file) @@ -56,8 +54,8 @@ def test_hard_link(self): assert not lexists(path1_real_file) def test_soft_link(self): - path1_real_file = join(self.test_dir, 'path1_real_file') - path2_symlink = join(self.test_dir, 'path2_symlink') + path1_real_file = join(self.test_dir, "path1_real_file") + path2_symlink = join(self.test_dir, "path2_symlink") touch(path1_real_file) assert isfile(path1_real_file) assert not islink(path1_real_file) @@ -65,7 +63,6 @@ def test_soft_link(self): if not softlink_supported(path1_real_file, self.test_dir) and on_win: pytest.skip("softlink not supported") - symlink(path1_real_file, path2_symlink) assert exists(path2_symlink) assert lexists(path2_symlink) @@ -75,7 +72,9 @@ def test_soft_link(self): # Windows Python >3.7, readlink actually gives something that starts with \\?\ # \\?\C:\users\appveyor\appdata\local\temp\1\c571cb0c\path1_real_file - assert os.lstat(path1_real_file).st_nlink == os.lstat(path2_symlink).st_nlink == 1 + assert ( + os.lstat(path1_real_file).st_nlink == os.lstat(path2_symlink).st_nlink == 1 + ) os.unlink(path1_real_file) assert not isfile(path1_real_file) diff --git a/tests/gateways/disk/test_permissions.py b/tests/gateways/disk/test_permissions.py index 201ae0fd370..71f1595488c 100644 --- a/tests/gateways/disk/test_permissions.py +++ b/tests/gateways/disk/test_permissions.py @@ -1,22 +1,28 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - +import errno import os import uuid - -import errno -import pytest -from errno import ENOENT, EACCES, EROFS, EPERM -from shutil import rmtree from contextlib import contextmanager +from errno import EACCES, ENOENT, EPERM, EROFS +from os.path import isfile, join, lexists +from shutil import rmtree +from stat import ( + S_IRGRP, + S_IROTH, + S_IRUSR, + S_IRWXG, + S_IRWXO, + S_IRWXU, + S_IXGRP, + S_IXOTH, + S_IXUSR, +) from tempfile import gettempdir -from os.path import join, isfile, lexists -from stat import S_IRUSR, S_IRGRP, S_IROTH -from stat import S_IRWXG, S_IRWXO, S_IRWXU -from stat import S_IXUSR, S_IXGRP, S_IXOTH from unittest.mock import patch +import pytest + from conda.gateways.disk.update import touch @@ -40,7 +46,7 @@ def tempdir(): def _remove_read_only(func, path, exc): excvalue = exc[1] if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES: - os.chmod(path, S_IRWXU| S_IRWXG| S_IRWXO) + os.chmod(path, S_IRWXU | S_IRWXG | S_IRWXO) func(path) else: pass @@ -52,7 +58,7 @@ def _make_read_only(path): def _can_write_file(test, content): try: - with open(test, 'w+') as fh: + with open(test, "w+") as fh: fh.write(content) fh.close() if os.stat(test).st_size == 0.0: @@ -60,14 +66,14 @@ def _can_write_file(test, content): else: return True except Exception as e: - 
+        eno = getattr(e, "errono", None)
         if eno == 13:
             return False
 
 
 def _try_open(path):
     try:
-        f = open(path, 'a+')
+        f = open(path, "a+")
     except:
         raise
     else:
@@ -80,8 +86,9 @@ def _can_execute(path):
 
 def test_make_writable():
     from conda.gateways.disk.permissions import make_writable
+
     with tempdir() as td:
-        test_path = join(td, 'test_path')
+        test_path = join(td, "test_path")
         touch(test_path)
         assert isfile(test_path)
         _try_open(test_path)
@@ -96,16 +103,18 @@ def test_make_writable():
 
 def test_make_writable_doesnt_exist():
     from conda.gateways.disk.permissions import make_writable
+
     with pytest.raises((IOError, OSError)) as exc:
-        make_writable(join('some', 'path', 'that', 'definitely', 'doesnt', 'exist'))
+        make_writable(join("some", "path", "that", "definitely", "doesnt", "exist"))
     assert exc.value.errno == ENOENT
 
 
 def test_make_writable_dir_EPERM():
     import conda.gateways.disk.permissions
     from conda.gateways.disk.permissions import make_writable
-    with patch.object(conda.gateways.disk.permissions, 'chmod') as chmod_mock:
-        chmod_mock.side_effect = IOError(EPERM, 'some message', 'foo')
+
+    with patch.object(conda.gateways.disk.permissions, "chmod") as chmod_mock:
+        chmod_mock.side_effect = IOError(EPERM, "some message", "foo")
         with tempdir() as td:
             assert not make_writable(td)
 
@@ -113,8 +122,9 @@ def test_make_writable_dir_EPERM():
 def test_make_writable_dir_EACCES():
     import conda.gateways.disk.permissions
     from conda.gateways.disk.permissions import make_writable
-    with patch.object(conda.gateways.disk.permissions, 'chmod') as chmod_mock:
-        chmod_mock.side_effect = IOError(EACCES, 'some message', 'foo')
+
+    with patch.object(conda.gateways.disk.permissions, "chmod") as chmod_mock:
+        chmod_mock.side_effect = IOError(EACCES, "some message", "foo")
         with tempdir() as td:
             assert not make_writable(td)
 
@@ -122,16 +132,18 @@ def test_make_writable_dir_EACCES():
 def test_make_writable_dir_EROFS():
     import conda.gateways.disk.permissions
     from conda.gateways.disk.permissions import make_writable
-    with patch.object(conda.gateways.disk.permissions, 'chmod') as chmod_mock:
-        chmod_mock.side_effect = IOError(EROFS, 'some message', 'foo')
+
+    with patch.object(conda.gateways.disk.permissions, "chmod") as chmod_mock:
+        chmod_mock.side_effect = IOError(EROFS, "some message", "foo")
         with tempdir() as td:
             assert not make_writable(td)
 
 
 def test_recursive_make_writable():
     from conda.gateways.disk.permissions import recursive_make_writable
+
     with tempdir() as td:
-        test_path = join(td, 'test_path')
+        test_path = join(td, "test_path")
         touch(test_path)
         assert isfile(test_path)
         _try_open(test_path)
@@ -146,8 +158,9 @@ def test_recursive_make_writable():
 
 def test_make_executable():
     from conda.gateways.disk.permissions import make_executable
+
     with tempdir() as td:
-        test_path = join(td, 'test_path')
+        test_path = join(td, "test_path")
         touch(test_path)
         assert isfile(test_path)
         _try_open(test_path)
diff --git a/tests/gateways/disk/test_read.py b/tests/gateways/disk/test_read.py
index a9c72854840..549bd574dbe 100644
--- a/tests/gateways/disk/test_read.py
+++ b/tests/gateways/disk/test_read.py
@@ -1,19 +1,17 @@
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
-
-from os.path import isdir, join, dirname
+from os.path import dirname, isdir, join
 from pprint import pprint
 
+import pytest
+
 from conda.common.compat import on_win
 from conda.common.path import get_python_site_packages_short_path
 from conda.common.serialize import json_dump, json_load
 from conda.gateways.disk.read import read_python_record
-import pytest
-from tests.data.env_metadata import (
-    __file__ as env_metadata_file,
-)
 
-ENV_METADATA_DIR = dirname(env_metadata_file)
+from tests.data.env_metadata import __file__ as env_metadata_file
 
+ENV_METADATA_DIR = dirname(env_metadata_file)
 
 
 def test_scrapy_py36_osx_whl():
@@ -43,13 +41,13 @@ def test_scrapy_py36_osx_whl():
             "service-identity",
             "six >=1.5.2",
             "twisted >=13.1.0",
-            "w3lib >=1.17.0"
+            "w3lib >=1.17.0",
         ],
         "fn": "Scrapy-1.5.1.dist-info",
         "name": "scrapy",
         "package_type": "virtual_python_wheel",
         "subdir": "pypi",
-        "version": "1.5.1"
+        "version": "1.5.1",
     }
     print(json_dump(files))
     print(json_dump(paths_data["paths"]))
@@ -60,21 +58,21 @@ def test_scrapy_py36_osx_whl():
         "_path": sp_dir + "/scrapy/core/scraper.py",
         "path_type": "hardlink",
         "sha256": "2559X9n2z1YKdFV9ElMRD6_88LIdqH1a2UwQimStt2k",
-        "size_in_bytes": 9960
+        "size_in_bytes": 9960,
     }
     assert pd1 in paths_data["paths"]
     pd2 = {
         "_path": sp_dir + "/scrapy/core/__pycache__/scraper.cpython-36.pyc",
         "path_type": "hardlink",
         "sha256": None,
-        "size_in_bytes": None
+        "size_in_bytes": None,
    }
     assert pd2 in paths_data["paths"]
     pd3 = {
         "_path": "../bin/scrapy" if on_win else "bin/scrapy",
         "path_type": "hardlink",
         "sha256": "RncAAoxSEnSi_0VIopaRxsq6kryQGL61YbEweN2TW3g",
-        "size_in_bytes": 268
+        "size_in_bytes": 268,
     }
     assert pd3 in paths_data["paths"]
 
@@ -103,13 +101,13 @@ def test_twilio_py36_osx_whl():
             "python 3.6.*",
             "pytz",
             "requests >=2.0.0",
-            "six"
+            "six",
         ],
         "fn": "twilio-6.16.1.dist-info",
         "name": "twilio",
         "package_type": "virtual_python_wheel",
         "subdir": "pypi",
-        "version": "6.16.1"
+        "version": "6.16.1",
     }
     print(json_dump(files))
     print(json_dump(paths_data["paths"]))
@@ -120,14 +118,14 @@ def test_twilio_py36_osx_whl():
         "_path": sp_dir + "/twilio/compat.py",
         "path_type": "hardlink",
         "sha256": "sJ1t7CKvxpipiX5cyH1YwXTf3n_FsLf_taUhuCVsCwE",
-        "size_in_bytes": 517
+        "size_in_bytes": 517,
     }
     assert pd1 in paths_data["paths"]
     pd2 = {
         "_path": sp_dir + "/twilio/jwt/__pycache__/compat.cpython-36.pyc",
         "path_type": "hardlink",
         "sha256": None,
-        "size_in_bytes": None
+        "size_in_bytes": None,
     }
     assert pd2 in paths_data["paths"]
 
@@ -147,36 +145,31 @@ def test_pyjwt_py36_osx_whl():
         "build": "pypi_0",
         "build_number": 0,
         "channel": "https://conda.anaconda.org/pypi",
-        "constrains": [
-            "cryptography >=1.4",
-            "pytest <4,>3"
-        ],
-        "depends": [
-            "python 3.6.*"
-        ],
+        "constrains": ["cryptography >=1.4", "pytest <4,>3"],
+        "depends": ["python 3.6.*"],
         "fn": "PyJWT-1.6.4.dist-info",
         "name": "pyjwt",
         "package_type": "virtual_python_wheel",
         "subdir": "pypi",
-        "version": "1.6.4"
+        "version": "1.6.4",
     }
     print(json_dump(files))
     print(json_dump(paths_data["paths"]))
     sp_dir = get_python_site_packages_short_path("3.6")
     assert ("../bin/pyjwt" if on_win else "bin/pyjwt") in files
-    assert sp_dir + '/jwt/__pycache__/__init__.cpython-36.pyc' in files
+    assert sp_dir + "/jwt/__pycache__/__init__.cpython-36.pyc" in files
     pd1 = {
         "_path": "../bin/pyjwt" if on_win else "bin/pyjwt",
         "path_type": "hardlink",
         "sha256": "wZET_24uZDEpsMdhAQ78Ass2k-76aQ59yPSE4DTE2To",
-        "size_in_bytes": 260
+        "size_in_bytes": 260,
     }
     assert pd1 in paths_data["paths"]
     pd2 = {
         "_path": sp_dir + "/jwt/contrib/__pycache__/__init__.cpython-36.pyc",
         "path_type": "hardlink",
         "sha256": None,
-        "size_in_bytes": None
+        "size_in_bytes": None,
     }
     assert pd2 in paths_data["paths"]
 
@@ -202,7 +195,7 @@ def test_cherrypy_py36_osx_whl():
         "name": "cherrypy",
         "package_type": "virtual_python_wheel",
         "subdir": "pypi",
-        "version": "17.2.0"
"17.2.0" + "version": "17.2.0", } assert constrains == [ @@ -211,7 +204,7 @@ def test_cherrypy_py36_osx_whl(): "pytest >=2.8", "python-memcached >=1.58", "routes >=2.3.1", - "rst-linker >=1.9" + "rst-linker >=1.9", ] if on_win: assert depends == [ @@ -220,7 +213,7 @@ def test_cherrypy_py36_osx_whl(): "portend >=2.1.1", "python 3.6.*", "pywin32", - "six >=1.11.0" + "six >=1.11.0", ] else: assert depends == [ @@ -228,7 +221,7 @@ def test_cherrypy_py36_osx_whl(): "more-itertools", "portend >=2.1.1", "python 3.6.*", - "six >=1.11.0" + "six >=1.11.0", ] @@ -259,13 +252,13 @@ def test_scrapy_py27_osx_no_binary(): "service-identity", "six >=1.5.2", "twisted >=13.1.0", - "w3lib >=1.17.0" + "w3lib >=1.17.0", ], "fn": "Scrapy-1.5.1-py2.7.egg-info", "name": "scrapy", "package_type": "virtual_python_egg_manageable", "subdir": "pypi", - "version": "1.5.1" + "version": "1.5.1", } print(json_dump(files)) print(json_dump(paths_data["paths"])) @@ -275,17 +268,17 @@ def test_scrapy_py27_osx_no_binary(): assert ("../bin/scrapy" if on_win else "bin/scrapy") in files pd1 = { "_path": sp_dir + "/scrapy/contrib/downloadermiddleware/decompression.py", - "path_type": "hardlink" + "path_type": "hardlink", } assert pd1 in paths_data["paths"] pd2 = { "_path": sp_dir + "/scrapy/contrib/downloadermiddleware/decompression.pyc", - "path_type": "hardlink" + "path_type": "hardlink", } assert pd2 in paths_data["paths"] pd3 = { "_path": "../bin/scrapy" if on_win else "bin/scrapy", - "path_type": "hardlink" + "path_type": "hardlink", } assert pd3 in paths_data["paths"] @@ -308,33 +301,21 @@ def test_twilio_py27_osx_no_binary(): "build_number": 0, "channel": "https://conda.anaconda.org/pypi", "constrains": [], - "depends": [ - "pyjwt >=1.4.2", - "python 2.7.*", - "pytz", - "requests >=2.0.0", - "six" - ], + "depends": ["pyjwt >=1.4.2", "python 2.7.*", "pytz", "requests >=2.0.0", "six"], "fn": "twilio-6.16.1-py2.7.egg-info", "name": "twilio", "package_type": "virtual_python_egg_manageable", "subdir": "pypi", - "version": "6.16.1" + "version": "6.16.1", } print(json_dump(files)) print(json_dump(paths_data["paths"])) sp_dir = get_python_site_packages_short_path("2.7") assert sp_dir + "/twilio/compat.py" in files assert sp_dir + "/twilio/compat.pyc" in files - pd1 = { - "_path": sp_dir + "/twilio/compat.py", - "path_type": "hardlink" - } + pd1 = {"_path": sp_dir + "/twilio/compat.py", "path_type": "hardlink"} assert pd1 in paths_data["paths"] - pd2 = { - "_path": sp_dir + "/twilio/jwt/compat.pyc", - "path_type": "hardlink" - } + pd2 = {"_path": sp_dir + "/twilio/jwt/compat.pyc", "path_type": "hardlink"} assert pd2 in paths_data["paths"] @@ -353,33 +334,22 @@ def test_pyjwt_py27_osx_no_binary(): "build": "pypi_0", "build_number": 0, "channel": "https://conda.anaconda.org/pypi", - "constrains": [ - "cryptography >=1.4", - "pytest <4,>3" - ], - "depends": [ - "python 2.7.*" - ], + "constrains": ["cryptography >=1.4", "pytest <4,>3"], + "depends": ["python 2.7.*"], "fn": "PyJWT-1.6.4-py2.7.egg-info", "name": "pyjwt", "package_type": "virtual_python_egg_manageable", "subdir": "pypi", - "version": "1.6.4" + "version": "1.6.4", } print(json_dump(files)) print(json_dump(paths_data["paths"])) sp_dir = get_python_site_packages_short_path("2.7") - assert ('../bin/pyjwt' if on_win else 'bin/pyjwt') in files - assert sp_dir + '/jwt/__init__.pyc' in files - pd1 = { - "_path": "../bin/pyjwt" if on_win else "bin/pyjwt", - "path_type": "hardlink" - } + assert ("../bin/pyjwt" if on_win else "bin/pyjwt") in files + assert sp_dir + 
"/jwt/__init__.pyc" in files + pd1 = {"_path": "../bin/pyjwt" if on_win else "bin/pyjwt", "path_type": "hardlink"} assert pd1 in paths_data["paths"] - pd2 = { - "_path": sp_dir + "/jwt/contrib/__init__.pyc", - "path_type": "hardlink" - } + pd2 = {"_path": sp_dir + "/jwt/contrib/__init__.pyc", "path_type": "hardlink"} assert pd2 in paths_data["paths"] @@ -404,14 +374,14 @@ def test_cherrypy_py27_osx_no_binary(): "name": "cherrypy", "package_type": "virtual_python_egg_manageable", "subdir": "pypi", - "version": "17.2.0" + "version": "17.2.0", } assert constrains == [ "jaraco-packaging >=3.2", "pytest >=2.8", "python-memcached >=1.58", "routes >=2.3.1", - "rst-linker >=1.9" + "rst-linker >=1.9", ] if on_win: assert depends == [ @@ -420,7 +390,7 @@ def test_cherrypy_py27_osx_no_binary(): "portend >=2.1.1", "python 2.7.*", "pywin32", - "six >=1.11.0" + "six >=1.11.0", ] else: assert depends == [ @@ -428,7 +398,7 @@ def test_cherrypy_py27_osx_no_binary(): "more-itertools", "portend >=2.1.1", "python 2.7.*", - "six >=1.11.0" + "six >=1.11.0", ] @@ -448,14 +418,12 @@ def test_six_py27_osx_no_binary_unmanageable(): "build_number": 0, "channel": "https://conda.anaconda.org/pypi", "constrains": [], - "depends": [ - "python 2.7.*" - ], + "depends": ["python 2.7.*"], "fn": "six-1.11.0-py2.7.egg-info", "name": "six", "package_type": "virtual_python_egg_unmanageable", "subdir": "pypi", - "version": "1.11.0" + "version": "1.11.0", } assert not files assert not prefix_rec.paths_data.paths diff --git a/tests/gateways/test_connection.py b/tests/gateways/test_connection.py index d08bb7d8bd6..11df6070fb8 100644 --- a/tests/gateways/test_connection.py +++ b/tests/gateways/test_connection.py @@ -1,7 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - from logging import getLogger from pathlib import Path from unittest import TestCase @@ -18,7 +16,7 @@ from conda.gateways.connection.session import CondaHttpAuth, CondaSession from conda.gateways.disk.delete import rm_rf from conda.testing.gateways.fixtures import MINIO_EXE -from conda.testing.integration import make_temp_env, env_var +from conda.testing.integration import env_var, make_temp_env log = getLogger(__name__) @@ -96,7 +94,9 @@ def test_s3_server(minio_s3_server): Config(signature_version="s3v4"), # config ) with pytest.raises(CondaExitZero): - with patch.object(boto3.session.Session.resource, "__defaults__", patched_defaults): + with patch.object( + boto3.session.Session.resource, "__defaults__", patched_defaults + ): # the .conda files in this repo are somehow corrupted with env_var("CONDA_USE_ONLY_TAR_BZ2", "True"): with make_temp_env( diff --git a/tests/gateways/test_jlap.py b/tests/gateways/test_jlap.py index 17fa3fb2b1d..19bc1e18cb6 100644 --- a/tests/gateways/test_jlap.py +++ b/tests/gateways/test_jlap.py @@ -52,7 +52,8 @@ def test_jlap_fetch(package_server: socket, tmp_path: Path, mocker): ) patched = mocker.patch( - "conda.gateways.repodata.jlap.fetch.download_and_hash", wraps=fetch.download_and_hash + "conda.gateways.repodata.jlap.fetch.download_and_hash", + wraps=fetch.download_and_hash, ) state = {} @@ -431,7 +432,11 @@ def test_jlap_errors( @pytest.mark.parametrize("use_jlap", [True, False]) def test_jlap_cache_clock( - package_server: socket, tmp_path: Path, package_repository_base: Path, mocker, use_jlap: bool + package_server: socket, + tmp_path: Path, + package_repository_base: Path, + mocker, + use_jlap: bool, ): """ Test that we add another "local_repodata_ttl" (an alternative to @@ -508,7 +513,9 @@ 
     assert cache.load_state()["refresh_ns"] == later2
 
     # check that non-expried cache avoids updating refresh_ns.
-    mocker.patch("time.time_ns", return_value=now + ((3 * local_repodata_ttl + 4) * int(1e9)))
+    mocker.patch(
+        "time.time_ns", return_value=now + ((3 * local_repodata_ttl + 4) * int(1e9))
+    )
 
     sd.load()
     assert cache.load_state()["refresh_ns"] == later2
@@ -536,7 +543,9 @@ class Response:
 
         raise fetch.HTTPError(response=Response())
 
-    mocker.patch("conda.gateways.repodata.jlap.fetch.download_and_hash", side_effect=error)
+    mocker.patch(
+        "conda.gateways.repodata.jlap.fetch.download_and_hash", side_effect=error
+    )
 
     with pytest.raises(CondaHTTPError, match="HTTP 405"):
         repo.repodata({})
@@ -624,9 +633,13 @@ def test_jlap_get_place():
     """
     (probably soon to be removed) helper function to get cache filenames.
     """
-    place = fetch.get_place("https://repo.anaconda.com/main/linux-64/current_repodata.json").name
+    place = fetch.get_place(
+        "https://repo.anaconda.com/main/linux-64/current_repodata.json"
+    ).name
     assert ".c" in place
 
-    place2 = fetch.get_place("https://repo.anaconda.com/main/linux-64/repodata.json").name
+    place2 = fetch.get_place(
+        "https://repo.anaconda.com/main/linux-64/repodata.json"
+    ).name
     assert ".c" not in place2
diff --git a/tests/gateways/test_logging.py b/tests/gateways/test_logging.py
index 5d249cea3e6..bb84ede36f0 100644
--- a/tests/gateways/test_logging.py
+++ b/tests/gateways/test_logging.py
@@ -1,18 +1,19 @@
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
-
+from logging import getLogger
 
 from conda.auxlib.ish import dals
 from conda.gateways.logging import TokenURLFilter
-from logging import getLogger
 
 log = getLogger(__name__)
 
 TR = TokenURLFilter.TOKEN_REPLACE
 
+
 def test_token_replace_big_string():
-    test_string = dals("""
+    test_string = dals(
+        """
     555.123.4567    +1-(800)-555-2468
     foo@demo.net    bar.ba@test.co.uk
     www.demo.com    http://foo.co.uk/t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar
@@ -29,8 +30,10 @@ def test_token_replace_big_string():
     http://foo.co.uk:8080/t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar
 
-    """)
-    result_string = dals("""
+    """
+    )
+    result_string = dals(
+        """
     555.123.4567    +1-(800)-555-2468
     foo@demo.net    bar.ba@test.co.uk
     www.demo.com    http://foo.co.uk/t//more/stuf/like/this.html?q=bar
@@ -47,15 +50,24 @@ def test_token_replace_big_string():
     http://foo.co.uk:8080/t//more/stuf/like/this.html?q=bar
 
-    """)
+    """
+    )
     print(TR(test_string))
     assert TR(test_string) == result_string
 
 
 def test_token_replace_individual_strings():
-    assert (TR("http://foo.co.uk:8080/t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar")
-            == "http://foo.co.uk:8080/t//more/stuf/like/this.html?q=bar")
-    assert (TR(" /t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar")
-            == " /t//more/stuf/like/this.html?q=bar")
-    assert (TR("/t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar")
-            == "/t//more/stuf/like/this.html?q=bar")
+    assert (
+        TR(
+            "http://foo.co.uk:8080/t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar"
+        )
+        == "http://foo.co.uk:8080/t//more/stuf/like/this.html?q=bar"
+    )
+    assert (
+        TR(" /t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar")
+        == " /t//more/stuf/like/this.html?q=bar"
+    )
+    assert (
+        TR("/t/tk-abkdehc1n38cCBDHN-cje/more/stuf/like/this.html?q=bar")
+        == "/t//more/stuf/like/this.html?q=bar"
+    )
diff --git a/tests/gateways/test_repodata_gateway.py b/tests/gateways/test_repodata_gateway.py
index 5a66006dfe2..74987b6be57 100644
--- a/tests/gateways/test_repodata_gateway.py
+++ b/tests/gateways/test_repodata_gateway.py
@@ -23,7 +23,12 @@
     ProxyError,
     UnavailableInvalidChannel,
 )
-from conda.gateways.connection import HTTPError, InvalidSchema, RequestsProxyError, SSLError
+from conda.gateways.connection import (
+    HTTPError,
+    InvalidSchema,
+    RequestsProxyError,
+    SSLError,
+)
 from conda.gateways.repodata import (
     RepodataCache,
     RepodataIsEmpty,
@@ -82,7 +87,9 @@ def test_stale(tmp_path):
     cache.load()
     assert not cache.stale()
-    assert 29 < cache.timeout() < 30.1  # time difference between record and save timestamp
+    assert (
+        29 < cache.timeout() < 30.1
+    )  # time difference between record and save timestamp
 
     # backdate
     cache.state["refresh_ns"] = time.time_ns() - (60 * 10**9)  # type: ignore
@@ -133,13 +140,20 @@ def test_coverage_repodata_state(tmp_path):
     assert dict(state.load()) == {}
 
 
-from conda.gateways.connection import HTTPError, InvalidSchema, RequestsProxyError, SSLError
+from conda.gateways.connection import (
+    HTTPError,
+    InvalidSchema,
+    RequestsProxyError,
+    SSLError,
+)
 from conda.gateways.repodata import RepodataIsEmpty, conda_http_errors
 
 
 def test_repodata_state_has_format():
     # wrong has_zst format
-    state = RepodataState("", "", "", dict={"has_zst": {"last_checked": "Tuesday", "value": 0}})
+    state = RepodataState(
+        "", "", "", dict={"has_zst": {"last_checked": "Tuesday", "value": 0}}
+    )
     value, dt = state.has_format("zst")
     assert value is False
     assert isinstance(dt, datetime.datetime)
diff --git a/tests/gateways/test_subprocess.py b/tests/gateways/test_subprocess.py
index a2fa2202a88..87c64106214 100644
--- a/tests/gateways/test_subprocess.py
+++ b/tests/gateways/test_subprocess.py
@@ -1,28 +1,32 @@
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
-
 from conda.gateways.subprocess import subprocess_call
 
 
 def test_subprocess_call_with_capture_output(capfd):
     resp = subprocess_call(
-        ('python -c "import sys, time; sys.stdout.write(\'1\\n\'); '
-         'sys.stderr.write(\'2\\n\'); time.sleep(.3); sys.stdout.write(\'end\\n\')"'),
+        (
+            "python -c \"import sys, time; sys.stdout.write('1\\n'); "
+            "sys.stderr.write('2\\n'); time.sleep(.3); sys.stdout.write('end\\n')\""
+        ),
         capture_output=False,
     )
     captured = capfd.readouterr()
-    assert captured.out.replace('\r\n', '\n') == '1\nend\n'
-    assert captured.err.replace('\r\n', '\n') == '2\n'
+    assert captured.out.replace("\r\n", "\n") == "1\nend\n"
+    assert captured.err.replace("\r\n", "\n") == "2\n"
     assert resp.rc == 0
 
+
 def test_subprocess_call_without_capture_output():
     resp = subprocess_call(
-        ('python -c "import sys, time; sys.stdout.write(\'1\\n\'); '
-         'sys.stderr.write(\'2\\n\'); time.sleep(.3); sys.stdout.write(\'end\\n\')"'),
+        (
+            "python -c \"import sys, time; sys.stdout.write('1\\n'); "
+            "sys.stderr.write('2\\n'); time.sleep(.3); sys.stdout.write('end\\n')\""
+        ),
        capture_output=True,
     )
-    assert resp.stdout.replace('\r\n', '\n') == '1\nend\n'
-    assert resp.stderr.replace('\r\n', '\n') == "2\n"
+    assert resp.stdout.replace("\r\n", "\n") == "1\nend\n"
+    assert resp.stderr.replace("\r\n", "\n") == "2\n"
     assert resp.rc == 0
diff --git a/tests/http_test_server.py b/tests/http_test_server.py
index 89195c0dc42..4cbe5fef8c3 100644
--- a/tests/http_test_server.py
+++ b/tests/http_test_server.py
@@ -32,12 +32,16 @@ def finish_request(self, request, client_address):
         self.RequestHandlerClass(request, client_address, self, directory=directory)
 
 
 def start_server(queue):
-
-    with DualStackServer(("127.0.0.1", 0), http.server.SimpleHTTPRequestHandler) as httpd:
+    with DualStackServer(
+        ("127.0.0.1", 0), http.server.SimpleHTTPRequestHandler
+    ) as httpd:
         host, port = httpd.socket.getsockname()[:2]
         queue.put(httpd)
         url_host = f"[{host}]" if ":" in host else host
-        print(f"Serving HTTP on {host} port {port} " f"(http://{url_host}:{port}/) ...")
+        print(
+            f"Serving HTTP on {host} port {port} "
+            f"(http://{url_host}:{port}/) ..."
+        )
         try:
             httpd.serve_forever()
         except KeyboardInterrupt:
diff --git a/tests/models/test_channel.py b/tests/models/test_channel.py
index ae6da206c27..6625cc1dbe0 100644
--- a/tests/models/test_channel.py
+++ b/tests/models/test_channel.py
@@ -6,7 +6,12 @@
 
 from conda.auxlib.ish import dals
 from conda.base.constants import DEFAULT_CHANNELS
-from conda.base.context import Context, conda_tests_ctxt_mgmt_def_pol, context, reset_context
+from conda.base.context import (
+    Context,
+    conda_tests_ctxt_mgmt_def_pol,
+    context,
+    reset_context,
+)
 from conda.common.configuration import YamlRawParameter
 from conda.common.io import env_unmodified, env_var, env_vars
 from conda.common.serialize import yaml_round_trip_load
@@ -22,65 +27,67 @@
 
 
 class DefaultConfigChannelTests(TestCase):
-
     @classmethod
     def setUpClass(cls):
         reset_context(())
         cls.platform = context.subdir
-        cls.DEFAULT_URLS = ['https://repo.anaconda.com/pkgs/main/%s' % cls.platform,
-                            'https://repo.anaconda.com/pkgs/main/noarch',
-                            'https://repo.anaconda.com/pkgs/r/%s' % cls.platform,
-                            'https://repo.anaconda.com/pkgs/r/noarch',
-                            ]
+        cls.DEFAULT_URLS = [
+            "https://repo.anaconda.com/pkgs/main/%s" % cls.platform,
+            "https://repo.anaconda.com/pkgs/main/noarch",
+            "https://repo.anaconda.com/pkgs/r/%s" % cls.platform,
+            "https://repo.anaconda.com/pkgs/r/noarch",
+        ]
         if on_win:
-            cls.DEFAULT_URLS.extend(['https://repo.anaconda.com/pkgs/msys2/%s' % cls.platform,
-                                     'https://repo.anaconda.com/pkgs/msys2/noarch'])
+            cls.DEFAULT_URLS.extend(
+                [
+                    "https://repo.anaconda.com/pkgs/msys2/%s" % cls.platform,
+                    "https://repo.anaconda.com/pkgs/msys2/noarch",
+                ]
+            )
 
     def test_channel_alias_channels(self):
-        channel = Channel('binstar/label/dev')
+        channel = Channel("binstar/label/dev")
         assert channel.channel_name == "binstar/label/dev"
         assert channel.channel_location == "conda.anaconda.org"
         assert channel.platform is None
         assert channel.package_filename is None
         assert channel.canonical_name == "binstar/label/dev"
         assert channel.urls() == [
-            'https://conda.anaconda.org/binstar/label/dev/%s' % context.subdir,
-            'https://conda.anaconda.org/binstar/label/dev/noarch',
+            "https://conda.anaconda.org/binstar/label/dev/%s" % context.subdir,
+            "https://conda.anaconda.org/binstar/label/dev/noarch",
         ]
 
-        channel = Channel('binstar/label/dev/win-32')
+        channel = Channel("binstar/label/dev/win-32")
         assert channel.channel_name == "binstar/label/dev"
         assert channel.channel_location == "conda.anaconda.org"
-        assert channel.platform == 'win-32'
+        assert channel.platform == "win-32"
         assert channel.package_filename is None
         assert channel.canonical_name == "binstar/label/dev"
         assert channel.urls() == [
-            'https://conda.anaconda.org/binstar/label/dev/win-32',
-            'https://conda.anaconda.org/binstar/label/dev/noarch',
+            "https://conda.anaconda.org/binstar/label/dev/win-32",
+            "https://conda.anaconda.org/binstar/label/dev/noarch",
         ]
 
-
     def test_channel_host_port(self):
-        channel = Channel('https://192.168.0.0:8000')
+        channel = Channel("https://192.168.0.0:8000")
         assert channel.channel_name == ""
         assert channel.channel_location == "192.168.0.0:8000"
         assert channel.platform is None
         assert channel.package_filename is None
         assert channel.canonical_name == "https://192.168.0.0:8000"
         assert channel.urls() == [
-            'https://192.168.0.0:8000/%s' % context.subdir,
-            'https://192.168.0.0:8000/noarch',
+            "https://192.168.0.0:8000/%s" % context.subdir,
+            "https://192.168.0.0:8000/noarch",
         ]
 
-
     def test_channel_cache(self):
         Channel._reset_state()
         assert len(Channel._cache_) == 0
-        dc = Channel('defaults')
+        dc = Channel("defaults")
         assert len(Channel._cache_) == 1
-        dc1 = Channel('defaults')
+        dc1 = Channel("defaults")
         assert len(Channel._cache_) == 1
-        dc2 = Channel('defaults')
+        dc2 = Channel("defaults")
         assert len(Channel._cache_) == 1
 
         assert dc1 is dc
@@ -90,43 +97,43 @@ def test_channel_cache(self):
         assert len(Channel._cache_) == 1
         assert dc3 is dc
 
-        ccc = Channel('conda-canary')
+        ccc = Channel("conda-canary")
         assert len(Channel._cache_) == 2
 
-        ccc1 = Channel('conda-canary')
+        ccc1 = Channel("conda-canary")
         assert len(Channel._cache_) == 2
         assert ccc1 is ccc
 
     def test_default_channel(self):
         with env_unmodified(conda_tests_ctxt_mgmt_def_pol):
-            dc = Channel('defaults')
-            assert dc.canonical_name == 'defaults'
+            dc = Channel("defaults")
+            assert dc.canonical_name == "defaults"
             assert dc.urls() == self.DEFAULT_URLS
             assert dc.subdir is None
-            assert str(dc) == 'defaults'
+            assert str(dc) == "defaults"
 
-            dc = Channel('defaults/win-32')
-            assert dc.canonical_name == 'defaults'
-            assert dc.subdir == 'win-32'
-            assert dc.urls()[0] == 'https://repo.anaconda.com/pkgs/main/win-32'
-            assert dc.urls()[1] == 'https://repo.anaconda.com/pkgs/main/noarch'
-            assert dc.urls()[2].endswith('/win-32')
+            dc = Channel("defaults/win-32")
+            assert dc.canonical_name == "defaults"
+            assert dc.subdir == "win-32"
+            assert dc.urls()[0] == "https://repo.anaconda.com/pkgs/main/win-32"
+            assert dc.urls()[1] == "https://repo.anaconda.com/pkgs/main/noarch"
+            assert dc.urls()[2].endswith("/win-32")
 
     def test_url_channel_w_platform(self):
         with env_unmodified(conda_tests_ctxt_mgmt_def_pol):
-            channel = Channel('https://repo.anaconda.com/pkgs/main/osx-64')
+            channel = Channel("https://repo.anaconda.com/pkgs/main/osx-64")
 
             assert channel.scheme == "https"
             assert channel.location == "repo.anaconda.com"
-            assert channel.platform == 'osx-64' == channel.subdir
-            assert channel.name == 'pkgs/main'
+            assert channel.platform == "osx-64" == channel.subdir
+            assert channel.name == "pkgs/main"
 
-            assert channel.base_url == 'https://repo.anaconda.com/pkgs/main'
-            assert channel.canonical_name == 'defaults'
-            assert channel.url() == 'https://repo.anaconda.com/pkgs/main/osx-64'
+            assert channel.base_url == "https://repo.anaconda.com/pkgs/main"
+            assert channel.canonical_name == "defaults"
+            assert channel.url() == "https://repo.anaconda.com/pkgs/main/osx-64"
             assert channel.urls() == [
-                'https://repo.anaconda.com/pkgs/main/osx-64',
-                'https://repo.anaconda.com/pkgs/main/noarch',
+                "https://repo.anaconda.com/pkgs/main/osx-64",
+                "https://repo.anaconda.com/pkgs/main/noarch",
             ]
 
     def test_bare_channel_http(self):
@@ -142,7 +149,7 @@ def test_bare_channel_http(self):
         assert channel.url() == join_url(url, context.subdir)
         assert channel.urls() == [
             join_url(url, context.subdir),
-            join_url(url, 'noarch'),
+            join_url(url, "noarch"),
         ]
 
     def test_bare_channel_file(self):
@@ -158,36 +165,37 @@ def test_bare_channel_file(self):
         assert channel.url() == join_url(url, context.subdir)
         assert channel.urls() == [
             join_url(url, context.subdir),
-            join_url(url, 'noarch'),
+            join_url(url, "noarch"),
         ]
 
     def test_channel_name_subdir_only(self):
         with env_unmodified(conda_tests_ctxt_mgmt_def_pol):
-            channel = Channel('pkgs/main/win-64')
+            channel = Channel("pkgs/main/win-64")
 
             assert channel.scheme == "https"
             assert channel.location == "repo.anaconda.com"
-            assert channel.platform == 'win-64' == channel.subdir
-            assert channel.name == 'pkgs/main'
+            assert channel.platform == "win-64" == channel.subdir
+            assert channel.name == "pkgs/main"
 
-            assert channel.base_url == 'https://repo.anaconda.com/pkgs/main'
-            assert channel.canonical_name == 'defaults'
-            assert channel.url() == 'https://repo.anaconda.com/pkgs/main/win-64'
+            assert channel.base_url == "https://repo.anaconda.com/pkgs/main"
+            assert channel.canonical_name == "defaults"
+            assert channel.url() == "https://repo.anaconda.com/pkgs/main/win-64"
             assert channel.urls() == [
-                'https://repo.anaconda.com/pkgs/main/win-64',
-                'https://repo.anaconda.com/pkgs/main/noarch',
+                "https://repo.anaconda.com/pkgs/main/win-64",
+                "https://repo.anaconda.com/pkgs/main/noarch",
             ]
 
 
 class AnacondaServerChannelTests(TestCase):
-
     @classmethod
     def setUpClass(cls):
-        string = dals("""
+        string = dals(
+            """
         channel_alias: https://10.2.3.4:8080/conda/t/tk-123-45
         migrated_channel_aliases:
           - https://conda.anaconda.org
           - http://10.2.3.4:7070/conda
-        """)
+        """
+        )
         reset_context(())
         rd = {
             "testdata": YamlRawParameter.make_raw_parameters(
@@ -204,31 +212,34 @@ def tearDownClass(cls):
         reset_context()
 
     def test_channel_alias_w_conda_path(self):
-        channel = Channel('bioconda')
+        channel = Channel("bioconda")
         assert channel.channel_name == "bioconda"
         assert channel.channel_location == "10.2.3.4:8080/conda"
         assert channel.platform is None
         assert channel.package_filename is None
         assert channel.auth is None
         assert channel.scheme == "https"
-        assert channel.canonical_name == 'bioconda'
+        assert channel.canonical_name == "bioconda"
         assert channel.urls() == [
             "https://10.2.3.4:8080/conda/bioconda/%s" % self.platform,
             "https://10.2.3.4:8080/conda/bioconda/noarch",
         ]
         assert channel.token == "tk-123-45"
         assert str(channel) == "https://10.2.3.4:8080/conda/bioconda"
-        assert str(Channel('bioconda/linux-32')) == "https://10.2.3.4:8080/conda/bioconda/linux-32"
+        assert (
+            str(Channel("bioconda/linux-32"))
+            == "https://10.2.3.4:8080/conda/bioconda/linux-32"
+        )
 
     def test_channel_alias_w_subhcnnale(self):
-        channel = Channel('bioconda/label/dev')
+        channel = Channel("bioconda/label/dev")
         assert channel.channel_name == "bioconda/label/dev"
         assert channel.channel_location == "10.2.3.4:8080/conda"
         assert channel.platform is None
         assert channel.package_filename is None
         assert channel.auth is None
         assert channel.scheme == "https"
-        assert channel.canonical_name == 'bioconda/label/dev'
+        assert channel.canonical_name == "bioconda/label/dev"
         assert channel.urls() == [
             "https://10.2.3.4:8080/conda/bioconda/label/dev/%s" % self.platform,
             "https://10.2.3.4:8080/conda/bioconda/label/dev/noarch",
@@ -244,7 +255,7 @@ def test_custom_token_in_channel(self):
         assert channel.auth is None
         assert channel.token == "x1029384756"
         assert channel.scheme == "https"
-        assert channel.canonical_name == 'bioconda'
+        assert channel.canonical_name == "bioconda"
         assert channel.urls() == [
             "https://10.2.3.4:8080/conda/bioconda/%s" % self.platform,
             "https://10.2.3.4:8080/conda/bioconda/noarch",
@@ -301,7 +312,8 @@ def test_token_in_custom_channel(self):
             "https://10.2.8.9:8080/conda/bioconda/label/dev/noarch",
         ]
         assert channel.urls(with_credentials=True) == [
-            "https://10.2.8.9:8080/conda/t/tk-987-321/bioconda/label/dev/%s" % self.platform,
+            "https://10.2.8.9:8080/conda/t/tk-987-321/bioconda/label/dev/%s"
+            % self.platform,
             "https://10.2.8.9:8080/conda/t/tk-987-321/bioconda/label/dev/noarch",
         ]
 
@@ -327,7 +339,8 @@ class CustomConfigChannelTests(TestCase):
 
     @classmethod
     def setUp(cls):
-        string = dals("""
+        string = dals(
+            """
         custom_channels:
           darwin: https://some.url.somewhere/stuff
           chuck: http://user1:pass2@another.url:8080/t/tk-1234/with/path
@@ -343,7 +356,8 @@ def setUp(cls):
           - http://192.168.0.15:8080/pkgs/anaconda
           - http://192.168.0.15:8080/pkgs/pro
          - http://192.168.0.15:8080/pkgs/msys2
-        """)
+        """
+        )
         reset_context(())
         rd = {
             "testdata": YamlRawParameter.make_raw_parameters(
@@ -355,179 +369,190 @@ def setUp(cls):
 
         cls.platform = context.subdir
 
-        cls.DEFAULT_URLS = ['http://192.168.0.15:8080/pkgs/anaconda/%s' % cls.platform,
-                            'http://192.168.0.15:8080/pkgs/anaconda/noarch',
-                            'http://192.168.0.15:8080/pkgs/pro/%s' % cls.platform,
-                            'http://192.168.0.15:8080/pkgs/pro/noarch',
-                            'http://192.168.0.15:8080/pkgs/msys2/%s' % cls.platform,
-                            'http://192.168.0.15:8080/pkgs/msys2/noarch',
-                            ]
+        cls.DEFAULT_URLS = [
+            "http://192.168.0.15:8080/pkgs/anaconda/%s" % cls.platform,
+            "http://192.168.0.15:8080/pkgs/anaconda/noarch",
+            "http://192.168.0.15:8080/pkgs/pro/%s" % cls.platform,
+            "http://192.168.0.15:8080/pkgs/pro/noarch",
+            "http://192.168.0.15:8080/pkgs/msys2/%s" % cls.platform,
+            "http://192.168.0.15:8080/pkgs/msys2/noarch",
+        ]
 
     @classmethod
     def tearDown(cls):
         reset_context()
 
     def test_pkgs_main(self):
-        channel = Channel('pkgs/anaconda')
+        channel = Channel("pkgs/anaconda")
         assert channel.channel_name == "pkgs/anaconda"
         assert channel.channel_location == "192.168.0.15:8080"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'http://192.168.0.15:8080/pkgs/anaconda/%s' % self.platform,
-            'http://192.168.0.15:8080/pkgs/anaconda/noarch',
+            "http://192.168.0.15:8080/pkgs/anaconda/%s" % self.platform,
+            "http://192.168.0.15:8080/pkgs/anaconda/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/anaconda')
+        channel = Channel("https://repo.anaconda.com/pkgs/anaconda")
         assert channel.channel_name == "pkgs/anaconda"
         assert channel.channel_location == "192.168.0.15:8080"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'http://192.168.0.15:8080/pkgs/anaconda/%s' % self.platform,
-            'http://192.168.0.15:8080/pkgs/anaconda/noarch',
+            "http://192.168.0.15:8080/pkgs/anaconda/%s" % self.platform,
+            "http://192.168.0.15:8080/pkgs/anaconda/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/anaconda/noarch')
+        channel = Channel("https://repo.anaconda.com/pkgs/anaconda/noarch")
         assert channel.channel_name == "pkgs/anaconda"
         assert channel.channel_location == "192.168.0.15:8080"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'http://192.168.0.15:8080/pkgs/anaconda/noarch',
+            "http://192.168.0.15:8080/pkgs/anaconda/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/anaconda/label/dev')
+        channel = Channel("https://repo.anaconda.com/pkgs/anaconda/label/dev")
         assert channel.channel_name == "pkgs/anaconda/label/dev"
         assert channel.channel_location == "192.168.0.15:8080"
         assert channel.canonical_name == "pkgs/anaconda/label/dev"
         assert channel.urls() == [
-            'http://192.168.0.15:8080/pkgs/anaconda/label/dev/%s' % self.platform,
-            'http://192.168.0.15:8080/pkgs/anaconda/label/dev/noarch',
+            "http://192.168.0.15:8080/pkgs/anaconda/label/dev/%s" % self.platform,
+            "http://192.168.0.15:8080/pkgs/anaconda/label/dev/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/anaconda/noarch/flask-1.0.tar.bz2')
+        channel = Channel(
+            "https://repo.anaconda.com/pkgs/anaconda/noarch/flask-1.0.tar.bz2"
+        )
         assert channel.channel_name == "pkgs/anaconda"
         assert channel.channel_location == "192.168.0.15:8080"
         assert channel.platform == "noarch"
         assert channel.package_filename == "flask-1.0.tar.bz2"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'http://192.168.0.15:8080/pkgs/anaconda/noarch',
+            "http://192.168.0.15:8080/pkgs/anaconda/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/anaconda/noarch/flask-1.0.conda')
+        channel = Channel(
+            "https://repo.anaconda.com/pkgs/anaconda/noarch/flask-1.0.conda"
+        )
         assert channel.channel_name == "pkgs/anaconda"
         assert channel.channel_location == "192.168.0.15:8080"
         assert channel.platform == "noarch"
         assert channel.package_filename == "flask-1.0.conda"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'http://192.168.0.15:8080/pkgs/anaconda/noarch',
+            "http://192.168.0.15:8080/pkgs/anaconda/noarch",
         ]
 
     def test_pkgs_pro(self):
-        channel = Channel('pkgs/pro')
+        channel = Channel("pkgs/pro")
         assert channel.channel_name == "pkgs/pro"
         assert channel.channel_location == "192.168.0.15:8080"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'http://192.168.0.15:8080/pkgs/pro/%s' % self.platform,
-            'http://192.168.0.15:8080/pkgs/pro/noarch',
+            "http://192.168.0.15:8080/pkgs/pro/%s" % self.platform,
+            "http://192.168.0.15:8080/pkgs/pro/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/pro')
+        channel = Channel("https://repo.anaconda.com/pkgs/pro")
         assert channel.channel_name == "pkgs/pro"
         assert channel.channel_location == "repo.anaconda.com"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'https://repo.anaconda.com/pkgs/pro/%s' % self.platform,
-            'https://repo.anaconda.com/pkgs/pro/noarch',
+            "https://repo.anaconda.com/pkgs/pro/%s" % self.platform,
+            "https://repo.anaconda.com/pkgs/pro/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/pro/noarch')
+        channel = Channel("https://repo.anaconda.com/pkgs/pro/noarch")
         assert channel.channel_name == "pkgs/pro"
         assert channel.channel_location == "repo.anaconda.com"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'https://repo.anaconda.com/pkgs/pro/noarch',
+            "https://repo.anaconda.com/pkgs/pro/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/pro/label/dev')
+        channel = Channel("https://repo.anaconda.com/pkgs/pro/label/dev")
         assert channel.channel_name == "pkgs/pro/label/dev"
         assert channel.channel_location == "repo.anaconda.com"
         assert channel.canonical_name == "pkgs/pro/label/dev"
         assert channel.urls() == [
-            'https://repo.anaconda.com/pkgs/pro/label/dev/%s' % self.platform,
-            'https://repo.anaconda.com/pkgs/pro/label/dev/noarch',
+            "https://repo.anaconda.com/pkgs/pro/label/dev/%s" % self.platform,
+            "https://repo.anaconda.com/pkgs/pro/label/dev/noarch",
         ]
 
-        channel = Channel('https://repo.anaconda.com/pkgs/pro/noarch/flask-1.0.tar.bz2')
+        channel = Channel("https://repo.anaconda.com/pkgs/pro/noarch/flask-1.0.tar.bz2")
         assert channel.channel_name == "pkgs/pro"
         assert channel.channel_location == "repo.anaconda.com"
         assert channel.platform == "noarch"
         assert channel.package_filename == "flask-1.0.tar.bz2"
         assert channel.canonical_name == "defaults"
         assert channel.urls() == [
-            'https://repo.anaconda.com/pkgs/pro/noarch',
+            "https://repo.anaconda.com/pkgs/pro/noarch",
         ]
 
     def test_custom_channels(self):
-        channel = Channel('darwin')
+        channel = Channel("darwin")
         assert channel.channel_name == "darwin"
         assert channel.channel_location == "some.url.somewhere/stuff"
 
-        channel = Channel('https://some.url.somewhere/stuff/darwin')
+        channel = Channel("https://some.url.somewhere/stuff/darwin")
         assert channel.channel_name == "darwin"
         assert channel.channel_location == "some.url.somewhere/stuff"
 
-        channel = Channel('https://some.url.somewhere/stuff/darwin/label/dev')
+        channel = Channel("https://some.url.somewhere/stuff/darwin/label/dev")
         assert channel.channel_name == "darwin/label/dev"
         assert channel.channel_location == "some.url.somewhere/stuff"
         assert channel.platform is None
 
-        channel = Channel('https://some.url.somewhere/stuff/darwin/label/dev/linux-64')
+        channel = Channel("https://some.url.somewhere/stuff/darwin/label/dev/linux-64")
         assert channel.channel_name == "darwin/label/dev"
         assert channel.channel_location == "some.url.somewhere/stuff"
-        assert channel.platform == 'linux-64'
+        assert channel.platform == "linux-64"
         assert channel.package_filename is None
 
-        channel = Channel('https://some.url.somewhere/stuff/darwin/label/dev/linux-64/flask-1.0.tar.bz2')
+        channel = Channel(
+            "https://some.url.somewhere/stuff/darwin/label/dev/linux-64/flask-1.0.tar.bz2"
+        )
         assert channel.channel_name == "darwin/label/dev"
         assert channel.channel_location == "some.url.somewhere/stuff"
-        assert channel.platform == 'linux-64'
-        assert channel.package_filename == 'flask-1.0.tar.bz2'
+        assert channel.platform == "linux-64"
+        assert channel.package_filename == "flask-1.0.tar.bz2"
         assert channel.auth is None
         assert channel.token is None
         assert channel.scheme == "https"
 
-        channel = Channel('https://some.url.somewhere/stuff/darwin/label/dev/linux-64/flask-1.0.tar.bz2')
+        channel = Channel(
+            "https://some.url.somewhere/stuff/darwin/label/dev/linux-64/flask-1.0.tar.bz2"
+        )
         assert channel.channel_name == "darwin/label/dev"
         assert channel.channel_location == "some.url.somewhere/stuff"
-        assert channel.platform == 'linux-64'
-        assert channel.package_filename == 'flask-1.0.tar.bz2'
+        assert channel.platform == "linux-64"
+        assert channel.package_filename == "flask-1.0.tar.bz2"
         assert channel.auth is None
         assert channel.token is None
         assert channel.scheme == "https"
 
     def test_custom_channels_port_token_auth(self):
-        channel = Channel('chuck')
+        channel = Channel("chuck")
         assert channel.channel_name == "chuck"
         assert channel.channel_location == "another.url:8080/with/path"
-        assert channel.auth == 'user1:pass2'
-        assert channel.token == 'tk-1234'
+        assert channel.auth == "user1:pass2"
+        assert channel.token == "tk-1234"
         assert channel.scheme == "http"
 
-        channel = Channel('https://another.url:8080/with/path/chuck/label/dev/linux-64/flask-1.0.tar.bz2')
+        channel = Channel(
+            "https://another.url:8080/with/path/chuck/label/dev/linux-64/flask-1.0.tar.bz2"
+        )
         assert channel.channel_name == "chuck/label/dev"
         assert channel.channel_location == "another.url:8080/with/path"
-        assert channel.auth == 'user1:pass2'
-        assert channel.token == 'tk-1234'
+        assert channel.auth == "user1:pass2"
+        assert channel.token == "tk-1234"
         assert channel.scheme == "https"
-        assert channel.platform == 'linux-64'
-        assert channel.package_filename == 'flask-1.0.tar.bz2'
+        assert channel.platform == "linux-64"
+        assert channel.package_filename == "flask-1.0.tar.bz2"
 
     def test_migrated_custom_channels(self):
-        channel = Channel('s3://just/cant/darwin/osx-64')
+        channel = Channel("s3://just/cant/darwin/osx-64")
         assert channel.channel_name == "darwin"
channel.channel_name == "darwin" assert channel.channel_location == "some.url.somewhere/stuff" - assert channel.platform == 'osx-64' + assert channel.platform == "osx-64" assert channel.package_filename is None assert channel.auth is None assert channel.token is None @@ -543,16 +568,21 @@ def test_migrated_custom_channels(self): "https://some.url.somewhere/stuff/darwin/noarch", ] - channel = Channel('https://some.url.somewhere/stuff/darwin/noarch/a-mighty-fine.tar.bz2') + channel = Channel( + "https://some.url.somewhere/stuff/darwin/noarch/a-mighty-fine.tar.bz2" + ) assert channel.channel_name == "darwin" assert channel.channel_location == "some.url.somewhere/stuff" - assert channel.platform == 'noarch' - assert channel.package_filename == 'a-mighty-fine.tar.bz2' + assert channel.platform == "noarch" + assert channel.package_filename == "a-mighty-fine.tar.bz2" assert channel.auth is None assert channel.token is None assert channel.scheme == "https" assert channel.canonical_name == "darwin" - assert channel.url() == "https://some.url.somewhere/stuff/darwin/noarch/a-mighty-fine.tar.bz2" + assert ( + channel.url() + == "https://some.url.somewhere/stuff/darwin/noarch/a-mighty-fine.tar.bz2" + ) assert channel.urls() == [ "https://some.url.somewhere/stuff/darwin/noarch", ] @@ -562,13 +592,17 @@ def test_migrated_custom_channels(self): ] def test_local_channel(self): - conda_bld_path = join(gettempdir(), 'conda-bld') + conda_bld_path = join(gettempdir(), "conda-bld") mkdir_p(conda_bld_path) try: - with env_var('CONDA_CROOT', conda_bld_path, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CROOT", + conda_bld_path, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): Channel._reset_state() - channel = Channel('local') - assert channel._channels[0].name.rsplit('/', 1)[-1] == 'conda-bld' + channel = Channel("local") + assert channel._channels[0].name.rsplit("/", 1)[-1] == "conda-bld" assert channel.channel_name == "local" assert channel.platform is None assert channel.package_filename is None @@ -588,63 +622,82 @@ def test_local_channel(self): assert channel.canonical_name == "local" assert channel.urls() == Channel(local_channel_first_subchannel).urls() - assert channel.urls()[0].startswith('file:///') + assert channel.urls()[0].startswith("file:///") finally: rm_rf(conda_bld_path) def test_defaults_channel(self): - channel = Channel('defaults') - assert channel.name == 'defaults' + channel = Channel("defaults") + assert channel.name == "defaults" assert channel.platform is None assert channel.package_filename is None assert channel.auth is None assert channel.token is None assert channel.scheme is None - assert channel.canonical_name == 'defaults' + assert channel.canonical_name == "defaults" assert channel.urls() == self.DEFAULT_URLS def test_file_channel(self): - channel = Channel("file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45/osx-64/flask-0.10.1-py35_2.tar.bz2") - assert channel.name == '5d9f5e45' - assert channel.location == '/var/folders/cp/7r2s_s593j7_cpdtp/T' - assert channel.platform == 'osx-64' + channel = Channel( + "file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45/osx-64/flask-0.10.1-py35_2.tar.bz2" + ) + assert channel.name == "5d9f5e45" + assert channel.location == "/var/folders/cp/7r2s_s593j7_cpdtp/T" + assert channel.platform == "osx-64" assert channel.package_filename == "flask-0.10.1-py35_2.tar.bz2" assert channel.auth is None assert channel.token is None assert channel.scheme == "file" - assert channel.url() == 
"file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45/osx-64/flask-0.10.1-py35_2.tar.bz2" + assert ( + channel.url() + == "file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45/osx-64/flask-0.10.1-py35_2.tar.bz2" + ) assert channel.urls() == [ "file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45/osx-64", - "file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45/noarch" + "file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45/noarch", ] - assert channel.canonical_name == 'file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45' + assert ( + channel.canonical_name + == "file:///var/folders/cp/7r2s_s593j7_cpdtp/T/5d9f5e45" + ) def test_old_channel_alias(self): - cf_urls = ["ftp://new.url:8082/conda-forge/%s" % self.platform, - "ftp://new.url:8082/conda-forge/noarch"] - assert Channel('conda-forge').urls() == cf_urls + cf_urls = [ + "ftp://new.url:8082/conda-forge/%s" % self.platform, + "ftp://new.url:8082/conda-forge/noarch", + ] + assert Channel("conda-forge").urls() == cf_urls url = "https://conda.anaconda.org/conda-forge/osx-64/some-great-package.tar.bz2" - assert Channel(url).canonical_name == 'conda-forge' - assert Channel(url).base_url == 'ftp://new.url:8082/conda-forge' - assert Channel(url).url() == "ftp://new.url:8082/conda-forge/osx-64/some-great-package.tar.bz2" + assert Channel(url).canonical_name == "conda-forge" + assert Channel(url).base_url == "ftp://new.url:8082/conda-forge" + assert ( + Channel(url).url() + == "ftp://new.url:8082/conda-forge/osx-64/some-great-package.tar.bz2" + ) assert Channel(url).urls() == [ "ftp://new.url:8082/conda-forge/osx-64", "ftp://new.url:8082/conda-forge/noarch", ] - channel = Channel("https://conda.anaconda.org/conda-forge/label/dev/linux-64/some-great-package.tar.bz2") - assert channel.url() == "ftp://new.url:8082/conda-forge/label/dev/linux-64/some-great-package.tar.bz2" + channel = Channel( + "https://conda.anaconda.org/conda-forge/label/dev/linux-64/some-great-package.tar.bz2" + ) + assert ( + channel.url() + == "ftp://new.url:8082/conda-forge/label/dev/linux-64/some-great-package.tar.bz2" + ) assert channel.urls() == [ "ftp://new.url:8082/conda-forge/label/dev/linux-64", "ftp://new.url:8082/conda-forge/label/dev/noarch", ] -class ChannelEnvironmentVarExpansionTest(TestCase): +class ChannelEnvironmentVarExpansionTest(TestCase): @classmethod def setUpClass(cls): - channels_config = dals(""" + channels_config = dals( + """ channels: - http://user22:$EXPANDED_PWD@some.url:8080 @@ -654,7 +707,8 @@ def setUpClass(cls): custom_channels: unexpanded: http://user1:$UNEXPANDED_PWD@another.url:8080/with/path/t/tk-1234 expanded: http://user33:$EXPANDED_PWD@another.url:8080/with/path/t/tk-1234 - """) + """ + ) reset_context() rd = { "testdata": YamlRawParameter.make_raw_parameters( @@ -668,23 +722,23 @@ def tearDownClass(cls): reset_context() def test_unexpanded_variables(self): - with env_var('EXPANDED_PWD', 'pass44'): - channel = Channel('unexpanded') - assert channel.auth == 'user1:$UNEXPANDED_PWD' + with env_var("EXPANDED_PWD", "pass44"): + channel = Channel("unexpanded") + assert channel.auth == "user1:$UNEXPANDED_PWD" def test_expanded_variables(self): - with env_var('EXPANDED_PWD', 'pass44'): - channel = Channel('expanded') - assert channel.auth == 'user33:pass44' - assert context.channels[0] == 'http://user22:pass44@some.url:8080' - assert context.allowlist_channels[0] == 'http://user22:pass44@some.url:8080' + with env_var("EXPANDED_PWD", "pass44"): + channel = Channel("expanded") + assert channel.auth == "user33:pass44" + assert context.channels[0] == 
"http://user22:pass44@some.url:8080" + assert context.allowlist_channels[0] == "http://user22:pass44@some.url:8080" class ChannelAuthTokenPriorityTests(TestCase): - @classmethod def setUpClass(cls): - string = dals(""" + string = dals( + """ custom_channels: chuck: http://user1:pass2@another.url:8080/with/path/t/tk-1234 chuck/subchan: http://user33:pass44@another.url:8080/with/path/t/tk-1234 @@ -697,7 +751,8 @@ def setUpClass(cls): - http://192.168.0.15:8080/pkgs/anaconda - donald/label/main - http://us:pw@192.168.0.15:8080/t/tkn-123/pkgs/r - """) + """ + ) reset_context(()) rd = { "testdata": YamlRawParameter.make_raw_parameters( @@ -717,44 +772,70 @@ def test_named_custom_channel(self): channel = Channel("chuck") assert channel.canonical_name == "chuck" assert channel.location == "another.url:8080/with/path" - assert channel.url() == "http://another.url:8080/with/path/chuck/%s" % self.platform - assert channel.url(True) == "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/%s" % self.platform + assert ( + channel.url() + == "http://another.url:8080/with/path/chuck/%s" % self.platform + ) + assert ( + channel.url(True) + == "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/%s" + % self.platform + ) assert channel.urls() == [ "http://another.url:8080/with/path/chuck/%s" % self.platform, "http://another.url:8080/with/path/chuck/noarch", ] assert channel.urls(True) == [ - "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/%s" % self.platform, + "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/%s" + % self.platform, "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/noarch", ] channel = Channel("chuck/label/dev") assert channel.canonical_name == "chuck/label/dev" assert channel.location == "another.url:8080/with/path" - assert channel.url() == "http://another.url:8080/with/path/chuck/label/dev/%s" % self.platform - assert channel.url(True) == "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/label/dev/%s" % self.platform + assert ( + channel.url() + == "http://another.url:8080/with/path/chuck/label/dev/%s" % self.platform + ) + assert ( + channel.url(True) + == "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/label/dev/%s" + % self.platform + ) assert channel.urls() == [ "http://another.url:8080/with/path/chuck/label/dev/%s" % self.platform, "http://another.url:8080/with/path/chuck/label/dev/noarch", ] assert channel.urls(True) == [ - "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/label/dev/%s" % self.platform, + "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/label/dev/%s" + % self.platform, "http://user1:pass2@another.url:8080/with/path/t/tk-1234/chuck/label/dev/noarch", ] def test_url_custom_channel(self): # scheme and credentials within url should override what's registered in config - channel = Channel("https://newuser:newpass@another.url:8080/with/path/t/new-token/chuck/label/dev") + channel = Channel( + "https://newuser:newpass@another.url:8080/with/path/t/new-token/chuck/label/dev" + ) assert channel.canonical_name == "chuck/label/dev" assert channel.location == "another.url:8080/with/path" - assert channel.url() == "https://another.url:8080/with/path/chuck/label/dev/%s" % self.platform - assert channel.url(True) == "https://newuser:newpass@another.url:8080/with/path/t/new-token/chuck/label/dev/%s" % self.platform + assert ( + channel.url() + == "https://another.url:8080/with/path/chuck/label/dev/%s" % self.platform + ) + assert ( + channel.url(True) + == 
"https://newuser:newpass@another.url:8080/with/path/t/new-token/chuck/label/dev/%s" + % self.platform + ) assert channel.urls() == [ "https://another.url:8080/with/path/chuck/label/dev/%s" % self.platform, "https://another.url:8080/with/path/chuck/label/dev/noarch", ] assert channel.urls(True) == [ - "https://newuser:newpass@another.url:8080/with/path/t/new-token/chuck/label/dev/%s" % self.platform, + "https://newuser:newpass@another.url:8080/with/path/t/new-token/chuck/label/dev/%s" + % self.platform, "https://newuser:newpass@another.url:8080/with/path/t/new-token/chuck/label/dev/noarch", ] @@ -762,30 +843,46 @@ def test_named_custom_channel_w_subchan(self): channel = Channel("chuck/subchan") assert channel.canonical_name == "chuck/subchan" assert channel.location == "another.url:8080/with/path" - assert channel.url() == "http://another.url:8080/with/path/chuck/subchan/%s" % self.platform - assert channel.url( - True) == "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/%s" % self.platform + assert ( + channel.url() + == "http://another.url:8080/with/path/chuck/subchan/%s" % self.platform + ) + assert ( + channel.url(True) + == "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/%s" + % self.platform + ) assert channel.urls() == [ "http://another.url:8080/with/path/chuck/subchan/%s" % self.platform, "http://another.url:8080/with/path/chuck/subchan/noarch", ] assert channel.urls(True) == [ - "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/%s" % self.platform, + "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/%s" + % self.platform, "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/noarch", ] channel = Channel("chuck/subchan/label/main") assert channel.canonical_name == "chuck/subchan/label/main" assert channel.location == "another.url:8080/with/path" - assert channel.url() == "http://another.url:8080/with/path/chuck/subchan/label/main/%s" % self.platform - assert channel.url( - True) == "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" % self.platform + assert ( + channel.url() + == "http://another.url:8080/with/path/chuck/subchan/label/main/%s" + % self.platform + ) + assert ( + channel.url(True) + == "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" + % self.platform + ) assert channel.urls() == [ - "http://another.url:8080/with/path/chuck/subchan/label/main/%s" % self.platform, + "http://another.url:8080/with/path/chuck/subchan/label/main/%s" + % self.platform, "http://another.url:8080/with/path/chuck/subchan/label/main/noarch", ] assert channel.urls(True) == [ - "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" % self.platform, + "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" + % self.platform, "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/noarch", ] @@ -793,14 +890,24 @@ def test_url_custom_channel_w_subchan(self): channel = Channel("http://another.url:8080/with/path/chuck/subchan/label/main") assert channel.canonical_name == "chuck/subchan/label/main" assert channel.location == "another.url:8080/with/path" - assert channel.url() == "http://another.url:8080/with/path/chuck/subchan/label/main/%s" % self.platform - assert channel.url(True) == "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" % self.platform + assert ( + channel.url() + == 
"http://another.url:8080/with/path/chuck/subchan/label/main/%s" + % self.platform + ) + assert ( + channel.url(True) + == "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" + % self.platform + ) assert channel.urls() == [ - "http://another.url:8080/with/path/chuck/subchan/label/main/%s" % self.platform, + "http://another.url:8080/with/path/chuck/subchan/label/main/%s" + % self.platform, "http://another.url:8080/with/path/chuck/subchan/label/main/noarch", ] assert channel.urls(True) == [ - "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" % self.platform, + "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/%s" + % self.platform, "http://user33:pass44@another.url:8080/with/path/t/tk-1234/chuck/subchan/label/main/noarch", ] @@ -809,7 +916,10 @@ def test_channel_alias(self): assert channel.canonical_name == "charlie" assert channel.location == "new.url:8082" assert channel.url() == "ftp://new.url:8082/charlie/%s" % self.platform - assert channel.url(True) == "ftp://nm:ps@new.url:8082/t/zyx-wvut/charlie/%s" % self.platform + assert ( + channel.url(True) + == "ftp://nm:ps@new.url:8082/t/zyx-wvut/charlie/%s" % self.platform + ) assert channel.urls() == [ "ftp://new.url:8082/charlie/%s" % self.platform, "ftp://new.url:8082/charlie/noarch", @@ -822,8 +932,14 @@ def test_channel_alias(self): channel = Channel("charlie/label/dev") assert channel.canonical_name == "charlie/label/dev" assert channel.location == "new.url:8082" - assert channel.url() == "ftp://new.url:8082/charlie/label/dev/%s" % self.platform - assert channel.url(True) == "ftp://nm:ps@new.url:8082/t/zyx-wvut/charlie/label/dev/%s" % self.platform + assert ( + channel.url() == "ftp://new.url:8082/charlie/label/dev/%s" % self.platform + ) + assert ( + channel.url(True) + == "ftp://nm:ps@new.url:8082/t/zyx-wvut/charlie/label/dev/%s" + % self.platform + ) assert channel.urls() == [ "ftp://new.url:8082/charlie/label/dev/%s" % self.platform, "ftp://new.url:8082/charlie/label/dev/noarch", @@ -836,9 +952,14 @@ def test_channel_alias(self): channel = Channel("ftp://nm:ps@new.url:8082/t/new-token/charlie/label/dev") assert channel.canonical_name == "charlie/label/dev" assert channel.location == "new.url:8082" - assert channel.url() == "ftp://new.url:8082/charlie/label/dev/%s" % self.platform - assert channel.url( - True) == "ftp://nm:ps@new.url:8082/t/new-token/charlie/label/dev/%s" % self.platform + assert ( + channel.url() == "ftp://new.url:8082/charlie/label/dev/%s" % self.platform + ) + assert ( + channel.url(True) + == "ftp://nm:ps@new.url:8082/t/new-token/charlie/label/dev/%s" + % self.platform + ) assert channel.urls() == [ "ftp://new.url:8082/charlie/label/dev/%s" % self.platform, "ftp://new.url:8082/charlie/label/dev/noarch", @@ -849,7 +970,7 @@ def test_channel_alias(self): ] def test_default_channels(self): - channel = Channel('defaults') + channel = Channel("defaults") assert channel.canonical_name == "defaults" assert channel.location is None assert channel.url() is None @@ -883,7 +1004,6 @@ def test_default_channels(self): class UrlChannelTests(TestCase): - def test_file_urls(self): url = "file:///machine/shared_folder" c = Channel(url) @@ -921,10 +1041,12 @@ def test_file_url_with_backslashes(self): ] def test_env_var_file_urls(self): - channels = ("file://\\\\network_share\\shared_folder\\path\\conda", - "https://some.url/ch_name", - "file:///some/place/on/my/machine",) - with env_var("CONDA_CHANNELS", ','.join(channels)): + 
channels = ( + "file://\\\\network_share\\shared_folder\\path\\conda", + "https://some.url/ch_name", + "file:///some/place/on/my/machine", + ) + with env_var("CONDA_CHANNELS", ",".join(channels)): new_context = Context(()) assert new_context.channels == channels @@ -942,7 +1064,7 @@ def test_env_var_file_urls(self): } def test_subdirs_env_var(self): - subdirs = ('linux-highest', 'linux-64', 'noarch') + subdirs = ("linux-highest", "linux-64", "noarch") def _channel_urls(channels=None): for channel in channels or DEFAULT_CHANNELS: @@ -957,40 +1079,58 @@ def _channel_urls(channels=None): c = Channel("defaults") assert c.urls() == list(_channel_urls()) - c = Channel('conda-forge') - assert c.urls() == list(_channel_urls(('conda-forge',))) + c = Channel("conda-forge") + assert c.urls() == list(_channel_urls(("conda-forge",))) - channels = ('bioconda', 'conda-forge') + channels = ("bioconda", "conda-forge") prioritized = prioritize_channels(channels) assert prioritized == { "https://conda.anaconda.org/bioconda/linux-highest": ("bioconda", 0), "https://conda.anaconda.org/bioconda/linux-64": ("bioconda", 0), "https://conda.anaconda.org/bioconda/noarch": ("bioconda", 0), - "https://conda.anaconda.org/conda-forge/linux-highest": ("conda-forge", 1), + "https://conda.anaconda.org/conda-forge/linux-highest": ( + "conda-forge", + 1, + ), "https://conda.anaconda.org/conda-forge/linux-64": ("conda-forge", 1), "https://conda.anaconda.org/conda-forge/noarch": ("conda-forge", 1), } - prioritized = prioritize_channels(channels, subdirs=('linux-again', 'noarch')) + prioritized = prioritize_channels( + channels, subdirs=("linux-again", "noarch") + ) assert prioritized == { "https://conda.anaconda.org/bioconda/linux-again": ("bioconda", 0), "https://conda.anaconda.org/bioconda/noarch": ("bioconda", 0), - "https://conda.anaconda.org/conda-forge/linux-again": ("conda-forge", 1), + "https://conda.anaconda.org/conda-forge/linux-again": ( + "conda-forge", + 1, + ), "https://conda.anaconda.org/conda-forge/noarch": ("conda-forge", 1), } def test_subdir_env_var(self): - with env_var('CONDA_SUBDIR', 'osx-1012-x84_64', stack_callback=conda_tests_ctxt_mgmt_def_pol): - channel = Channel('https://conda.anaconda.org/msarahan/osx-1012-x84_64/clangxx_osx-1012-x86_64-10.12-h0bb54af_0.tar.bz2') - assert channel.base_url == 'https://conda.anaconda.org/msarahan' - assert channel.package_filename == 'clangxx_osx-1012-x86_64-10.12-h0bb54af_0.tar.bz2' - assert channel.platform == 'osx-1012-x84_64' # the platform attribute is misnamed here in conda 4.3; conda 4.4 code can correctly use the channel.subdir attribute + with env_var( + "CONDA_SUBDIR", + "osx-1012-x84_64", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + channel = Channel( + "https://conda.anaconda.org/msarahan/osx-1012-x84_64/clangxx_osx-1012-x86_64-10.12-h0bb54af_0.tar.bz2" + ) + assert channel.base_url == "https://conda.anaconda.org/msarahan" + assert ( + channel.package_filename + == "clangxx_osx-1012-x86_64-10.12-h0bb54af_0.tar.bz2" + ) + assert ( + channel.platform == "osx-1012-x84_64" + ) # the platform attribute is misnamed here in conda 4.3; conda 4.4 code can correctly use the channel.subdir attribute class UnknownChannelTests(TestCase): - def test_regression_against_unknown_none(self): - defaults = Channel('defaults') + defaults = Channel("defaults") channel = Channel(None) assert channel.scheme is None @@ -1003,7 +1143,7 @@ def test_regression_against_unknown_none(self): assert channel.url() == defaults.url() assert channel.urls() == defaults.urls() - 
channel = Channel('') + channel = Channel("") assert channel.scheme is None assert channel.location is None assert channel.platform is None @@ -1014,7 +1154,7 @@ def test_regression_against_unknown_none(self): assert channel.url() == defaults.url() assert channel.urls() == defaults.urls() - channel = Channel('None:///') + channel = Channel("None:///") assert channel.scheme is None assert channel.location is None assert channel.platform is None @@ -1025,7 +1165,7 @@ def test_regression_against_unknown_none(self): assert channel.url() == defaults.url() assert channel.urls() == defaults.urls() - channel = Channel('None') + channel = Channel("None") assert channel.scheme is None assert channel.location is None assert channel.platform is None @@ -1038,15 +1178,16 @@ def test_regression_against_unknown_none(self): class OtherChannelParsingTests(TestCase): - @classmethod def setUpClass(cls): - string = dals(""" + string = dals( + """ default_channels: - http://test/conda/anaconda channels: - http://test/conda/anaconda-cluster - """) + """ + ) reset_context() rd = { "testdata": YamlRawParameter.make_raw_parameters( @@ -1064,48 +1205,83 @@ def tearDownClass(cls): def test_channels_with_dashes(self): # regression test for #5763 - assert context.channels[0] == 'http://test/conda/anaconda-cluster' + assert context.channels[0] == "http://test/conda/anaconda-cluster" channel_urls = prioritize_channels(context.channels) channel_urls = tuple(channel_urls.items()) - assert channel_urls[0] == ('http://test/conda/anaconda-cluster/%s' % context.subdir, ('http://test/conda/anaconda-cluster', 0)) - assert channel_urls[1] == ('http://test/conda/anaconda-cluster/noarch', ('http://test/conda/anaconda-cluster', 0)) + assert channel_urls[0] == ( + "http://test/conda/anaconda-cluster/%s" % context.subdir, + ("http://test/conda/anaconda-cluster", 0), + ) + assert channel_urls[1] == ( + "http://test/conda/anaconda-cluster/noarch", + ("http://test/conda/anaconda-cluster", 0), + ) def test_multichannel_priority(): with env_unmodified(conda_tests_ctxt_mgmt_def_pol): - channels = ['conda-test', 'defaults', 'conda-forge'] - subdirs = ['new-optimized-subdir', 'linux-32', 'noarch'] - channel_priority_map = prioritize_channels(channels, with_credentials=True, subdirs=subdirs) + channels = ["conda-test", "defaults", "conda-forge"] + subdirs = ["new-optimized-subdir", "linux-32", "noarch"] + channel_priority_map = prioritize_channels( + channels, with_credentials=True, subdirs=subdirs + ) if on_win: assert channel_priority_map == { - "https://conda.anaconda.org/conda-test/new-optimized-subdir": ("conda-test", 0), + "https://conda.anaconda.org/conda-test/new-optimized-subdir": ( + "conda-test", + 0, + ), "https://conda.anaconda.org/conda-test/linux-32": ("conda-test", 0), "https://conda.anaconda.org/conda-test/noarch": ("conda-test", 0), - "https://repo.anaconda.com/pkgs/main/new-optimized-subdir": ("defaults", 1), + "https://repo.anaconda.com/pkgs/main/new-optimized-subdir": ( + "defaults", + 1, + ), "https://repo.anaconda.com/pkgs/main/linux-32": ("defaults", 1), "https://repo.anaconda.com/pkgs/main/noarch": ("defaults", 1), - "https://repo.anaconda.com/pkgs/r/new-optimized-subdir": ("defaults", 2), + "https://repo.anaconda.com/pkgs/r/new-optimized-subdir": ( + "defaults", + 2, + ), "https://repo.anaconda.com/pkgs/r/linux-32": ("defaults", 2), "https://repo.anaconda.com/pkgs/r/noarch": ("defaults", 2), - "https://repo.anaconda.com/pkgs/msys2/new-optimized-subdir": ("defaults", 3), + 
"https://repo.anaconda.com/pkgs/msys2/new-optimized-subdir": ( + "defaults", + 3, + ), "https://repo.anaconda.com/pkgs/msys2/linux-32": ("defaults", 3), "https://repo.anaconda.com/pkgs/msys2/noarch": ("defaults", 3), - "https://conda.anaconda.org/conda-forge/new-optimized-subdir": ("conda-forge", 4), + "https://conda.anaconda.org/conda-forge/new-optimized-subdir": ( + "conda-forge", + 4, + ), "https://conda.anaconda.org/conda-forge/linux-32": ("conda-forge", 4), "https://conda.anaconda.org/conda-forge/noarch": ("conda-forge", 4), } else: assert channel_priority_map == { - "https://conda.anaconda.org/conda-test/new-optimized-subdir": ("conda-test", 0), + "https://conda.anaconda.org/conda-test/new-optimized-subdir": ( + "conda-test", + 0, + ), "https://conda.anaconda.org/conda-test/linux-32": ("conda-test", 0), "https://conda.anaconda.org/conda-test/noarch": ("conda-test", 0), - "https://repo.anaconda.com/pkgs/main/new-optimized-subdir": ("defaults", 1), + "https://repo.anaconda.com/pkgs/main/new-optimized-subdir": ( + "defaults", + 1, + ), "https://repo.anaconda.com/pkgs/main/linux-32": ("defaults", 1), "https://repo.anaconda.com/pkgs/main/noarch": ("defaults", 1), - "https://repo.anaconda.com/pkgs/r/new-optimized-subdir": ("defaults", 2), + "https://repo.anaconda.com/pkgs/r/new-optimized-subdir": ( + "defaults", + 2, + ), "https://repo.anaconda.com/pkgs/r/linux-32": ("defaults", 2), "https://repo.anaconda.com/pkgs/r/noarch": ("defaults", 2), - "https://conda.anaconda.org/conda-forge/new-optimized-subdir": ("conda-forge", 3), + "https://conda.anaconda.org/conda-forge/new-optimized-subdir": ( + "conda-forge", + 3, + ), "https://conda.anaconda.org/conda-forge/linux-32": ("conda-forge", 3), "https://conda.anaconda.org/conda-forge/noarch": ("conda-forge", 3), } @@ -1116,21 +1292,33 @@ def test_ppc64le_vs_ppc64(): ppc64_channel = Channel("https://conda.anaconda.org/dummy-channel/linux-ppc64") assert ppc64_channel.subdir == "linux-ppc64" - assert ppc64_channel.url(with_credentials=True) == "https://conda.anaconda.org/dummy-channel/linux-ppc64" + assert ( + ppc64_channel.url(with_credentials=True) + == "https://conda.anaconda.org/dummy-channel/linux-ppc64" + ) ppc64le_channel = Channel("https://conda.anaconda.org/dummy-channel/linux-ppc64le") assert ppc64le_channel.subdir == "linux-ppc64le" - assert ppc64le_channel.url(with_credentials=True) == "https://conda.anaconda.org/dummy-channel/linux-ppc64le" + assert ( + ppc64le_channel.url(with_credentials=True) + == "https://conda.anaconda.org/dummy-channel/linux-ppc64le" + ) print(Channel._cache_) Channel._cache_.clear() ppc64le_channel = Channel("https://conda.anaconda.org/dummy-channel/linux-ppc64le") assert ppc64le_channel.subdir == "linux-ppc64le" - assert ppc64le_channel.url(with_credentials=True) == "https://conda.anaconda.org/dummy-channel/linux-ppc64le" + assert ( + ppc64le_channel.url(with_credentials=True) + == "https://conda.anaconda.org/dummy-channel/linux-ppc64le" + ) ppc64_channel = Channel("https://conda.anaconda.org/dummy-channel/linux-ppc64") assert ppc64_channel.subdir == "linux-ppc64" - assert ppc64_channel.url(with_credentials=True) == "https://conda.anaconda.org/dummy-channel/linux-ppc64" + assert ( + ppc64_channel.url(with_credentials=True) + == "https://conda.anaconda.org/dummy-channel/linux-ppc64" + ) def test_channel_mangles_urls(): diff --git a/tests/models/test_dist.py b/tests/models/test_dist.py index decb384e4b0..cab5c16ac02 100644 --- a/tests/models/test_dist.py +++ b/tests/models/test_dist.py @@ -1,21 +1,18 @@ # Copyright 
(C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - +from logging import getLogger from os.path import join from tempfile import gettempdir -from conda.base.constants import UNKNOWN_CHANNEL +import pytest -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol +from conda.base.constants import UNKNOWN_CHANNEL +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context from conda.common.io import env_var from conda.common.url import join_url, path_to_url from conda.gateways.disk.create import mkdir_p from conda.gateways.disk.delete import rm_rf from conda.models.dist import Dist -from logging import getLogger - -import pytest log = getLogger(__name__) @@ -40,16 +37,17 @@ def test_dist(fmt): d3 = Dist(d2) assert d3 is d2 + @pytest.mark.parametrize("fmt", [".conda", ".tar.bz2"]) def test_channel(fmt): d = Dist.from_string(f"conda-forge::spyder-app-2.3.8-py27_0{fmt}") - assert d.channel == 'conda-forge' + assert d.channel == "conda-forge" assert d.quad[0] == "spyder-app" assert d.dist_name == "spyder-app-2.3.8-py27_0" assert d.fmt == fmt d = Dist.from_string(f"s3://some/bucket/name::spyder-app-2.3.8-py27_0{fmt}") - assert d.channel == 's3://some/bucket/name' + assert d.channel == "s3://some/bucket/name" assert d.quad[0] == "spyder-app" assert d.dist_name == "spyder-app-2.3.8-py27_0" assert d.to_url() == join_url( @@ -62,10 +60,10 @@ def test_dist_with_channel_url(fmt): # standard named channel url = f"https://repo.anaconda.com/pkgs/main/win-64/spyder-app-2.3.8-py27_0{fmt}" d = Dist(url) - assert d.channel == 'defaults' - assert d.name == 'spyder-app' - assert d.version == '2.3.8' - assert d.build_string == 'py27_0' + assert d.channel == "defaults" + assert d.name == "spyder-app" + assert d.version == "2.3.8" + assert d.build_string == "py27_0" assert d.fmt == fmt assert d.to_url() == url @@ -74,10 +72,10 @@ def test_dist_with_channel_url(fmt): # standard url channel url = f"https://not.real.continuum.io/pkgs/main/win-64/spyder-app-2.3.8-py27_0{fmt}" d = Dist(url) - assert d.channel == 'defaults' # because pkgs/anaconda is in defaults - assert d.name == 'spyder-app' - assert d.version == '2.3.8' - assert d.build_string == 'py27_0' + assert d.channel == "defaults" # because pkgs/anaconda is in defaults + assert d.name == "spyder-app" + assert d.version == "2.3.8" + assert d.build_string == "py27_0" assert d.fmt == fmt assert d.to_url() == url @@ -86,28 +84,32 @@ def test_dist_with_channel_url(fmt): # another standard url channel url = f"https://not.real.continuum.io/not/anaconda/win-64/spyder-app-2.3.8-py27_0{fmt}" d = Dist(url) - assert d.channel == 'https://not.real.continuum.io/not/anaconda' - assert d.name == 'spyder-app' - assert d.version == '2.3.8' - assert d.build_string == 'py27_0' + assert d.channel == "https://not.real.continuum.io/not/anaconda" + assert d.name == "spyder-app" + assert d.version == "2.3.8" + assert d.build_string == "py27_0" assert d.fmt == fmt assert d.to_url() == url assert d.is_channel is True # local file url that is a named channel - conda_bld_path = join(gettempdir(), 'conda-bld') + conda_bld_path = join(gettempdir(), "conda-bld") try: mkdir_p(conda_bld_path) with env_var( - "CONDA_BLD_PATH", conda_bld_path, stack_callback=conda_tests_ctxt_mgmt_def_pol + "CONDA_BLD_PATH", + conda_bld_path, + stack_callback=conda_tests_ctxt_mgmt_def_pol, ): - url = path_to_url(join_url(context.croot, "osx-64", f"bcrypt-3.1.1-py35_2{fmt}")) + url = path_to_url( + join_url(context.croot, "osx-64", f"bcrypt-3.1.1-py35_2{fmt}") + ) d = 
Dist(url) - assert d.channel == 'local' - assert d.name == 'bcrypt' - assert d.version == '3.1.1' - assert d.build_string == 'py35_2' + assert d.channel == "local" + assert d.name == "bcrypt" + assert d.version == "3.1.1" + assert d.build_string == "py35_2" assert d.fmt == fmt assert d.to_url() == url @@ -116,28 +118,29 @@ def test_dist_with_channel_url(fmt): rm_rf(conda_bld_path) # local file url that is not a named channel - url = join_url("file:///some/location/on/disk", "osx-64", f"bcrypt-3.1.1-py35_2{fmt}") + url = join_url( + "file:///some/location/on/disk", "osx-64", f"bcrypt-3.1.1-py35_2{fmt}" + ) d = Dist(url) - assert d.channel == 'file:///some/location/on/disk' - assert d.name == 'bcrypt' - assert d.version == '3.1.1' - assert d.build_string == 'py35_2' + assert d.channel == "file:///some/location/on/disk" + assert d.name == "bcrypt" + assert d.version == "3.1.1" + assert d.build_string == "py35_2" assert d.fmt == fmt assert d.to_url() == url assert d.is_channel is True - @pytest.mark.parametrize("fmt", [".conda", ".tar.bz2"]) def test_dist_with_non_channel_url(fmt): # contrived url url = f"https://repo.anaconda.com/pkgs/anaconda/cffi-1.9.1-py34_0{fmt}" d = Dist(url) - assert d.channel == '' - assert d.name == 'cffi' - assert d.version == '1.9.1' - assert d.build_string == 'py34_0' + assert d.channel == "" + assert d.name == "cffi" + assert d.version == "1.9.1" + assert d.build_string == "py34_0" assert d.fmt == fmt assert d.to_url() == url @@ -146,10 +149,10 @@ def test_dist_with_non_channel_url(fmt): # file url that is not a channel url = path_to_url(join_url(context.croot, f"cffi-1.9.1-py34_0{fmt}")) d = Dist(url) - assert d.channel == '' - assert d.name == 'cffi' - assert d.version == '1.9.1' - assert d.build_string == 'py34_0' + assert d.channel == "" + assert d.name == "cffi" + assert d.version == "1.9.1" + assert d.build_string == "py34_0" assert d.fmt == fmt assert d.to_url() == url @@ -159,10 +162,10 @@ def test_dist_with_non_channel_url(fmt): # TODO: maybe this should look up the channel in urls.txt? or maybe that's too coupled? 
url = join_url(path_to_url(context.pkgs_dirs[0]), f"cffi-1.9.1-py34_0{fmt}") d = Dist(url) - assert d.channel == '' - assert d.name == 'cffi' - assert d.version == '1.9.1' - assert d.build_string == 'py34_0' + assert d.channel == "" + assert d.name == "cffi" + assert d.version == "1.9.1" + assert d.build_string == "py34_0" assert d.fmt == fmt assert d.to_url() == url diff --git a/tests/models/test_index_record.py b/tests/models/test_index_record.py index 078b7a8d50a..146923e1be4 100644 --- a/tests/models/test_index_record.py +++ b/tests/models/test_index_record.py @@ -1,45 +1,51 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from logging import getLogger from unittest import TestCase -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context from conda.common.io import env_unmodified from conda.models.channel import Channel from conda.models.records import PackageRecord, PrefixRecord log = getLogger(__name__) -blas_value = 'accelerate' if context.subdir == 'osx-64' else 'openblas' +blas_value = "accelerate" if context.subdir == "osx-64" else "openblas" -class PrefixRecordTests(TestCase): +class PrefixRecordTests(TestCase): def test_prefix_record_no_channel(self): with env_unmodified(conda_tests_ctxt_mgmt_def_pol): pr = PrefixRecord( - name='austin', - version='1.2.3', - build_string='py34_2', + name="austin", + version="1.2.3", + build_string="py34_2", build_number=2, url="https://repo.anaconda.com/pkgs/main/win-32/austin-1.2.3-py34_2.tar.bz2", subdir="win-32", - md5='0123456789', + md5="0123456789", files=(), ) - assert pr.url == "https://repo.anaconda.com/pkgs/main/win-32/austin-1.2.3-py34_2.tar.bz2" - assert pr.channel.canonical_name == 'defaults' + assert ( + pr.url + == "https://repo.anaconda.com/pkgs/main/win-32/austin-1.2.3-py34_2.tar.bz2" + ) + assert pr.channel.canonical_name == "defaults" assert pr.subdir == "win-32" assert pr.fn == "austin-1.2.3-py34_2.tar.bz2" - channel_str = str(Channel("https://repo.anaconda.com/pkgs/main/win-32/austin-1.2.3-py34_2.tar.bz2")) + channel_str = str( + Channel( + "https://repo.anaconda.com/pkgs/main/win-32/austin-1.2.3-py34_2.tar.bz2" + ) + ) assert channel_str == "https://repo.anaconda.com/pkgs/main/win-32" assert dict(pr.dump()) == dict( - name='austin', - version='1.2.3', - build='py34_2', + name="austin", + version="1.2.3", + build="py34_2", build_number=2, url="https://repo.anaconda.com/pkgs/main/win-32/austin-1.2.3-py34_2.tar.bz2", - md5='0123456789', + md5="0123456789", files=(), channel=channel_str, subdir="win-32", @@ -53,23 +59,23 @@ def test_index_record_timestamp(self): ts_secs = 1507565728 ts_millis = ts_secs * 1000 rec = PackageRecord( - name='test-package', - version='1.2.3', - build='2', + name="test-package", + version="1.2.3", + build="2", build_number=2, - timestamp=ts_secs + timestamp=ts_secs, ) assert rec.timestamp == ts_secs - assert rec.dump()['timestamp'] == ts_millis + assert rec.dump()["timestamp"] == ts_millis ts_millis = 1507565728999 ts_secs = ts_millis / 1000 rec = PackageRecord( - name='test-package', - version='1.2.3', - build='2', + name="test-package", + version="1.2.3", + build="2", build_number=2, - timestamp=ts_secs + timestamp=ts_secs, ) assert rec.timestamp == ts_secs - assert rec.dump()['timestamp'] == ts_millis + assert rec.dump()["timestamp"] == ts_millis diff --git a/tests/models/test_match_spec.py b/tests/models/test_match_spec.py index a2e1b6cf27e..ca97e624cba 100644 --- 
a/tests/models/test_match_spec.py +++ b/tests/models/test_match_spec.py @@ -1,25 +1,22 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - from unittest import TestCase import pytest from conda.base.constants import CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2 -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context from conda.cli.common import arg2spec, spec_from_line -from conda.common.io import env_unmodified from conda.common.compat import on_win +from conda.common.io import env_unmodified from conda.exceptions import CondaValueError, InvalidMatchSpec, InvalidSpec from conda.models.channel import Channel from conda.models.dist import Dist -from conda.models.records import PackageRecord from conda.models.match_spec import ChannelMatch, MatchSpec, _parse_spec_str +from conda.models.records import PackageRecord from conda.models.version import VersionSpec - -blas_value = 'accelerate' if context.subdir == 'osx-64' else 'openblas' +blas_value = "accelerate" if context.subdir == "osx-64" else "openblas" def m(string): @@ -33,85 +30,107 @@ def DPkg(s, **kwargs): name=d.name, version=d.version, build=d.build_string, - build_number=int(d.build_string.rsplit('_', 1)[-1]), + build_number=int(d.build_string.rsplit("_", 1)[-1]), channel=d.channel, subdir=context.subdir, md5="012345789", - **kwargs) + **kwargs, + ) class MatchSpecTests(TestCase): - def test_match_1(self): for spec, result in ( - ('numpy 1.7*', True), ('numpy 1.7.1', True), - ('numpy 1.7', False), ('numpy 1.5*', False), - ('numpy >=1.5', True), ('numpy >=1.5,<2', True), - ('numpy >=1.8,<1.9', False), ('numpy >1.5,<2,!=1.7.1', False), - ('numpy >1.8,<2|==1.7', False),('numpy >1.8,<2|>=1.7.1', True), - ('numpy >=1.8|1.7*', True), ('numpy ==1.7', False), - ('numpy >=1.5,>1.6', True), ('numpy ==1.7.1', True), - ('numpy ==1.7.1.0', True), ('numpy==1.7.1.0.0', True), - ('numpy >=1,*.7.*', True), ('numpy *.7.*,>=1', True), - ('numpy >=1,*.8.*', False), ('numpy >=2,*.7.*', False), - ('numpy 1.6*|1.7*', True), ('numpy 1.6*|1.8*', False), - ('numpy 1.6.2|1.7*', True), ('numpy 1.6.2|1.7.1', True), - ('numpy 1.6.2|1.7.0', False), ('numpy 1.7.1 py27_0', True), - ('numpy 1.7.1 py26_0', False), ('numpy >1.7.1a', True), - ('python', False), + ("numpy 1.7*", True), + ("numpy 1.7.1", True), + ("numpy 1.7", False), + ("numpy 1.5*", False), + ("numpy >=1.5", True), + ("numpy >=1.5,<2", True), + ("numpy >=1.8,<1.9", False), + ("numpy >1.5,<2,!=1.7.1", False), + ("numpy >1.8,<2|==1.7", False), + ("numpy >1.8,<2|>=1.7.1", True), + ("numpy >=1.8|1.7*", True), + ("numpy ==1.7", False), + ("numpy >=1.5,>1.6", True), + ("numpy ==1.7.1", True), + ("numpy ==1.7.1.0", True), + ("numpy==1.7.1.0.0", True), + ("numpy >=1,*.7.*", True), + ("numpy *.7.*,>=1", True), + ("numpy >=1,*.8.*", False), + ("numpy >=2,*.7.*", False), + ("numpy 1.6*|1.7*", True), + ("numpy 1.6*|1.8*", False), + ("numpy 1.6.2|1.7*", True), + ("numpy 1.6.2|1.7.1", True), + ("numpy 1.6.2|1.7.0", False), + ("numpy 1.7.1 py27_0", True), + ("numpy 1.7.1 py26_0", False), + ("numpy >1.7.1a", True), + ("python", False), ): m = MatchSpec(spec) - assert m.match(DPkg('numpy-1.7.1-py27_0.tar.bz2')) == result - assert 'name' in m - assert m.name == 'python' or 'version' in m + assert m.match(DPkg("numpy-1.7.1-py27_0.tar.bz2")) == result + assert "name" in m + assert m.name == "python" or "version" in m # both version numbers conforming to PEP 440 - assert not MatchSpec('numpy 
>=1.0.1').match(DPkg('numpy-1.0.1a-0.tar.bz2')) + assert not MatchSpec("numpy >=1.0.1").match(DPkg("numpy-1.0.1a-0.tar.bz2")) # both version numbers non-conforming to PEP 440 - assert not MatchSpec('numpy >=1.0.1.vc11').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2')) - assert MatchSpec('numpy >=1.0.1*.vc11').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2')) + assert not MatchSpec("numpy >=1.0.1.vc11").match( + DPkg("numpy-1.0.1a.vc11-0.tar.bz2") + ) + assert MatchSpec("numpy >=1.0.1*.vc11").match( + DPkg("numpy-1.0.1a.vc11-0.tar.bz2") + ) # one conforming, other non-conforming to PEP 440 - assert MatchSpec('numpy <1.0.1').match(DPkg('numpy-1.0.1.vc11-0.tar.bz2')) - assert MatchSpec('numpy <1.0.1').match(DPkg('numpy-1.0.1a.vc11-0.tar.bz2')) - assert not MatchSpec('numpy >=1.0.1.vc11').match(DPkg('numpy-1.0.1a-0.tar.bz2')) - assert MatchSpec('numpy >=1.0.1a').match(DPkg('numpy-1.0.1z-0.tar.bz2')) - assert MatchSpec('numpy >=1.0.1a py27*').match(DPkg('numpy-1.0.1z-py27_1.tar.bz2')) - assert MatchSpec('blas * openblas_0').match(DPkg('blas-1.0-openblas_0.tar.bz2')) - - assert MatchSpec('blas')._is_simple() - assert not MatchSpec('blas 1.0')._is_simple() - assert not MatchSpec('blas 1.0 1')._is_simple() - - m = MatchSpec('blas 1.0', optional=True) + assert MatchSpec("numpy <1.0.1").match(DPkg("numpy-1.0.1.vc11-0.tar.bz2")) + assert MatchSpec("numpy <1.0.1").match(DPkg("numpy-1.0.1a.vc11-0.tar.bz2")) + assert not MatchSpec("numpy >=1.0.1.vc11").match(DPkg("numpy-1.0.1a-0.tar.bz2")) + assert MatchSpec("numpy >=1.0.1a").match(DPkg("numpy-1.0.1z-0.tar.bz2")) + assert MatchSpec("numpy >=1.0.1a py27*").match( + DPkg("numpy-1.0.1z-py27_1.tar.bz2") + ) + assert MatchSpec("blas * openblas_0").match(DPkg("blas-1.0-openblas_0.tar.bz2")) + + assert MatchSpec("blas")._is_simple() + assert not MatchSpec("blas 1.0")._is_simple() + assert not MatchSpec("blas 1.0 1")._is_simple() + + m = MatchSpec("blas 1.0", optional=True) m2 = MatchSpec(m, optional=False) - m3 = MatchSpec(m2, target='blas-1.0-0.tar.bz2') + m3 = MatchSpec(m2, target="blas-1.0-0.tar.bz2") m4 = MatchSpec(m3, target=None, optional=True) assert m.spec == m2.spec and m.optional != m2.optional - assert m2.spec == m3.spec and m2.optional == m3.optional and m2.target != m3.target + assert ( + m2.spec == m3.spec and m2.optional == m3.optional and m2.target != m3.target + ) assert m == m4 self.assertRaises(ValueError, MatchSpec, (1, 2, 3)) def test_no_name_match_spec(self): ms = MatchSpec(track_features="mkl") - assert str(ms) == '*[track_features=mkl]' + assert str(ms) == "*[track_features=mkl]" def test_to_filename(self): - m1 = MatchSpec(fn='foo-1.7-52.tar.bz2') - m2 = MatchSpec(name='foo', version='1.7', build='52') - m3 = MatchSpec(Dist('defaults::foo-1.7-52')) - assert m1._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2' - assert m2._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2' - assert m3._to_filename_do_not_use() == 'foo-1.7-52.tar.bz2' - - for spec in 'bitarray', 'pycosat 0.6.0', 'numpy 1.6*': + m1 = MatchSpec(fn="foo-1.7-52.tar.bz2") + m2 = MatchSpec(name="foo", version="1.7", build="52") + m3 = MatchSpec(Dist("defaults::foo-1.7-52")) + assert m1._to_filename_do_not_use() == "foo-1.7-52.tar.bz2" + assert m2._to_filename_do_not_use() == "foo-1.7-52.tar.bz2" + assert m3._to_filename_do_not_use() == "foo-1.7-52.tar.bz2" + + for spec in "bitarray", "pycosat 0.6.0", "numpy 1.6*": ms = MatchSpec(spec) assert ms._to_filename_do_not_use() is None def test_hash(self): - a = MatchSpec('numpy 1.7*') - b = MatchSpec('numpy 1.7*') - c = MatchSpec(name='numpy', 
version='1.7*') + a = MatchSpec("numpy 1.7*") + b = MatchSpec("numpy 1.7*") + c = MatchSpec(name="numpy", version="1.7*") # optional should not change the hash d = MatchSpec(c, optional=True) assert d.optional @@ -125,10 +144,10 @@ def test_hash(self): assert hash(a) == hash(b) assert hash(a) == hash(c) assert hash(a) != hash(d) - c = MatchSpec('python') - d = MatchSpec('python 2.7.4') - e = MatchSpec('python', version='2.7.4') - f = MatchSpec('* 2.7.4', name='python') + c = MatchSpec("python") + d = MatchSpec("python 2.7.4") + e = MatchSpec("python", version="2.7.4") + f = MatchSpec("* 2.7.4", name="python") assert d == e assert d == f assert a != c @@ -173,34 +192,56 @@ def test_canonical_string_forms(self): assert m("numpy[version='1.7|1.8']") == "numpy[version='1.7|1.8']" assert m('numpy[version="1.7,1.8"]') == "numpy[version='1.7,1.8']" - assert m('numpy >1.7') == "numpy[version='>1.7']" - assert m('numpy>=1.7') == "numpy[version='>=1.7']" + assert m("numpy >1.7") == "numpy[version='>1.7']" + assert m("numpy>=1.7") == "numpy[version='>=1.7']" assert m("numpy=1.7=py3*_2") == "numpy==1.7[build=py3*_2]" assert m("numpy=1.7.*=py3*_2") == "numpy=1.7[build=py3*_2]" assert m("https://repo.anaconda.com/pkgs/free::numpy") == "pkgs/free::numpy" - assert m("numpy[channel=https://repo.anaconda.com/pkgs/free]") == "pkgs/free::numpy" + assert ( + m("numpy[channel=https://repo.anaconda.com/pkgs/free]") + == "pkgs/free::numpy" + ) assert m("defaults::numpy") == "defaults::numpy" assert m("numpy[channel=defaults]") == "defaults::numpy" assert m("conda-forge::numpy") == "conda-forge::numpy" assert m("numpy[channel=conda-forge]") == "conda-forge::numpy" assert m("numpy[channel=defaults,subdir=osx-64]") == "defaults/osx-64::numpy" - assert m("numpy[channel=https://repo.anaconda.com/pkgs/free/osx-64, subdir=linux-64]") == "pkgs/free/linux-64::numpy" - assert m("https://repo.anaconda.com/pkgs/free/win-32::numpy") == "pkgs/free/win-32::numpy" - assert m("numpy[channel=https://repo.anaconda.com/pkgs/free/osx-64]") == "pkgs/free/osx-64::numpy" + assert ( + m( + "numpy[channel=https://repo.anaconda.com/pkgs/free/osx-64, subdir=linux-64]" + ) + == "pkgs/free/linux-64::numpy" + ) + assert ( + m("https://repo.anaconda.com/pkgs/free/win-32::numpy") + == "pkgs/free/win-32::numpy" + ) + assert ( + m("numpy[channel=https://repo.anaconda.com/pkgs/free/osx-64]") + == "pkgs/free/osx-64::numpy" + ) assert m("defaults/win-32::numpy") == "defaults/win-32::numpy" assert m("conda-forge/linux-64::numpy") == "conda-forge/linux-64::numpy" - assert m("numpy[channel=conda-forge,subdir=noarch]") == "conda-forge/noarch::numpy" + assert ( + m("numpy[channel=conda-forge,subdir=noarch]") == "conda-forge/noarch::numpy" + ) - assert m("numpy[subdir=win-32]") == 'numpy[subdir=win-32]' - assert m("*/win-32::numpy") == 'numpy[subdir=win-32]' - assert m("*/win-32::numpy[subdir=\"osx-64\"]") == 'numpy[subdir=osx-64]' + assert m("numpy[subdir=win-32]") == "numpy[subdir=win-32]" + assert m("*/win-32::numpy") == "numpy[subdir=win-32]" + assert m('*/win-32::numpy[subdir="osx-64"]') == "numpy[subdir=osx-64]" # TODO: should the result in these example pull out subdir? 
- assert m("https://repo.anaconda.com/pkgs/free/linux-32::numpy") == "pkgs/free/linux-32::numpy" - assert m("numpy[channel=https://repo.anaconda.com/pkgs/free/linux-32]") == "pkgs/free/linux-32::numpy" + assert ( + m("https://repo.anaconda.com/pkgs/free/linux-32::numpy") + == "pkgs/free/linux-32::numpy" + ) + assert ( + m("numpy[channel=https://repo.anaconda.com/pkgs/free/linux-32]") + == "pkgs/free/linux-32::numpy" + ) assert m("numpy=1.10=py38_0") == "numpy==1.10=py38_0" assert m("numpy==1.10=py38_0") == "numpy==1.10=py38_0" @@ -212,7 +253,9 @@ def test_canonical_string_forms(self): assert m("numpy !=1.10 py38_0") == "numpy[version='!=1.10',build=py38_0]" assert m("numpy!=1.10=py38_0") == "numpy[version='!=1.10',build=py38_0]" assert m("numpy !=1.10=py38_0") == "numpy[version='!=1.10',build=py38_0]" - assert m("numpy >1.7,!=1.10 py38_0") == "numpy[version='>1.7,!=1.10',build=py38_0]" + assert ( + m("numpy >1.7,!=1.10 py38_0") == "numpy[version='>1.7,!=1.10',build=py38_0]" + ) assert m("numpy!=1.10.*") == "numpy!=1.10.*" assert m("numpy!=1.10,!=1.11") == "numpy[version='!=1.10,!=1.11']" assert m("numpy=1.10.*,!=1.10.2") == "numpy[version='=1.10.*,!=1.10.2']" @@ -230,14 +273,31 @@ def test_canonical_string_forms(self): # assert m("numpy-1.10-py38_0[channel=defaults]") == "defaults::numpy==1.10=py38_0" # assert m("*/win-32::numpy-1.10-py38_0[channel=defaults]") == "defaults/win-32::numpy==1.10=py38_0" - @pytest.mark.skip(reason="key-value features interface has been disabled in conda 4.4") + @pytest.mark.skip( + reason="key-value features interface has been disabled in conda 4.4" + ) def test_key_value_features_canonical_string_forms(self): - assert m("numpy[build=py3*_2, track_features=mkl]") == "numpy[build=py3*_2,provides_features='blas=mkl']" - assert m("numpy[build=py3*_2, track_features='mkl debug']") == "numpy[build=py3*_2,provides_features='blas=mkl debug=true']" - assert m("numpy[track_features='mkl,debug', build=py3*_2]") == "numpy[build=py3*_2,provides_features='blas=mkl debug=true']" - assert m("numpy[track_features='mkl,debug' build=py3*_2]") == "numpy[build=py3*_2,provides_features='blas=mkl debug=true']" + assert ( + m("numpy[build=py3*_2, track_features=mkl]") + == "numpy[build=py3*_2,provides_features='blas=mkl']" + ) + assert ( + m("numpy[build=py3*_2, track_features='mkl debug']") + == "numpy[build=py3*_2,provides_features='blas=mkl debug=true']" + ) + assert ( + m("numpy[track_features='mkl,debug', build=py3*_2]") + == "numpy[build=py3*_2,provides_features='blas=mkl debug=true']" + ) + assert ( + m("numpy[track_features='mkl,debug' build=py3*_2]") + == "numpy[build=py3*_2,provides_features='blas=mkl debug=true']" + ) - assert m('numpy[features="mkl debug" build_number=2]') == "numpy[build_number=2,provides_features='blas=mkl debug=true']" + assert ( + m('numpy[features="mkl debug" build_number=2]') + == "numpy[build_number=2,provides_features='blas=mkl debug=true']" + ) def test_legacy_features_canonical_string_forms(self): assert m("mkl@") == "*[track_features=mkl]" @@ -247,8 +307,14 @@ def test_legacy_features_canonical_string_forms(self): def test_tarball_match_specs(self): url = "https://conda.anaconda.org/conda-canary/linux-64/conda-4.3.21.post699+1dab973-py36h4a561cd_0.tar.bz2" - assert m(url) == "conda-canary/linux-64::conda==4.3.21.post699+1dab973=py36h4a561cd_0" - assert m("conda-canary/linux-64::conda==4.3.21.post699+1dab973=py36h4a561cd_0") == "conda-canary/linux-64::conda==4.3.21.post699+1dab973=py36h4a561cd_0" + assert ( + m(url) + == 
"conda-canary/linux-64::conda==4.3.21.post699+1dab973=py36h4a561cd_0" + ) + assert ( + m("conda-canary/linux-64::conda==4.3.21.post699+1dab973=py36h4a561cd_0") + == "conda-canary/linux-64::conda==4.3.21.post699+1dab973=py36h4a561cd_0" + ) url = "https://conda.anaconda.org/conda-canary/conda-4.3.21.post699+1dab973-py36h4a561cd_0.tar.bz2" assert m(url) == "*[url=%s]" % url @@ -269,7 +335,7 @@ def test_tarball_match_specs(self): assert not MatchSpec(url=url, md5="1234").match(pref1) assert not MatchSpec(url=url, md5="1234").match(pref1.dump()) assert MatchSpec(url=url, md5="1234").match(pref2) - assert MatchSpec(url=url, md5="1234").get('md5') == "1234" + assert MatchSpec(url=url, md5="1234").get("md5") == "1234" url = "file:///var/folders/cp/7r2s_s593j7_cpdtxxsmct880000gp/T/edfc ñçêáôß/flask-0.10.1-py35_2.tar.bz2" assert m(url) == "*[url='%s']" % url @@ -280,57 +346,79 @@ def test_tarball_match_specs(self): # assert MatchSpec('defaults/zos::python').get_exact_value('channel').urls() == () def test_exact_values(self): - assert MatchSpec("*").get_exact_value('name') is None - assert MatchSpec("numpy").get_exact_value('name') == 'numpy' + assert MatchSpec("*").get_exact_value("name") is None + assert MatchSpec("numpy").get_exact_value("name") == "numpy" - assert MatchSpec("numpy=1.7").get_exact_value('version') is None - assert MatchSpec("numpy==1.7").get_exact_value('version') == '1.7' - assert MatchSpec("numpy[version=1.7]").get_exact_value('version') == '1.7' + assert MatchSpec("numpy=1.7").get_exact_value("version") is None + assert MatchSpec("numpy==1.7").get_exact_value("version") == "1.7" + assert MatchSpec("numpy[version=1.7]").get_exact_value("version") == "1.7" - assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('version') == '1.7' - assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('build') is None - assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('version') == '1.7' - assert MatchSpec("numpy=1.7=py3*_2").get_exact_value('build') is None - assert MatchSpec("numpy=1.7.*=py37_2").get_exact_value('version') is None - assert MatchSpec("numpy=1.7.*=py37_2").get_exact_value('build') == 'py37_2' + assert MatchSpec("numpy=1.7=py3*_2").get_exact_value("version") == "1.7" + assert MatchSpec("numpy=1.7=py3*_2").get_exact_value("build") is None + assert MatchSpec("numpy=1.7=py3*_2").get_exact_value("version") == "1.7" + assert MatchSpec("numpy=1.7=py3*_2").get_exact_value("build") is None + assert MatchSpec("numpy=1.7.*=py37_2").get_exact_value("version") is None + assert MatchSpec("numpy=1.7.*=py37_2").get_exact_value("build") == "py37_2" def test_channel_matching(self): with env_unmodified(conda_tests_ctxt_mgmt_def_pol): - assert ChannelMatch('pkgs/main').match('defaults') is False - assert ChannelMatch('defaults').match('pkgs/main') is True + assert ChannelMatch("pkgs/main").match("defaults") is False + assert ChannelMatch("defaults").match("pkgs/main") is True - assert ChannelMatch("https://repo.anaconda.com/pkgs/main").match('defaults') is False - assert ChannelMatch("defaults").match("https://repo.anaconda.com/pkgs/main") is True + assert ( + ChannelMatch("https://repo.anaconda.com/pkgs/main").match("defaults") + is False + ) + assert ( + ChannelMatch("defaults").match("https://repo.anaconda.com/pkgs/main") + is True + ) - assert ChannelMatch("https://conda.anaconda.org/conda-forge").match('conda-forge') is True - assert ChannelMatch("conda-forge").match("https://conda.anaconda.org/conda-forge") is True + assert ( + ChannelMatch("https://conda.anaconda.org/conda-forge").match( + 
"conda-forge" + ) + is True + ) + assert ( + ChannelMatch("conda-forge").match( + "https://conda.anaconda.org/conda-forge" + ) + is True + ) - assert ChannelMatch("https://repo.anaconda.com/pkgs/main").match('conda-forge') is False + assert ( + ChannelMatch("https://repo.anaconda.com/pkgs/main").match("conda-forge") + is False + ) assert str(MatchSpec("pkgs/main::*")) == "pkgs/main::*" assert str(MatchSpec("defaults::*")) == "defaults::*" def test_matchspec_errors(self): with pytest.raises(InvalidSpec): - MatchSpec('blas [optional') + MatchSpec("blas [optional") with pytest.raises(InvalidSpec): - MatchSpec('blas [test=]') + MatchSpec("blas [test=]") with pytest.raises(InvalidSpec): MatchSpec('blas[invalid="1"]') if not on_win: # skipping on Windows for now. don't feel like dealing with the windows url path crud - assert str(MatchSpec("/some/file/on/disk/package-1.2.3-2.tar.bz2")) == '*[url=file:///some/file/on/disk/package-1.2.3-2.tar.bz2]' + assert ( + str(MatchSpec("/some/file/on/disk/package-1.2.3-2.tar.bz2")) + == "*[url=file:///some/file/on/disk/package-1.2.3-2.tar.bz2]" + ) def test_dist(self): with env_unmodified(conda_tests_ctxt_mgmt_def_pol): - dst = Dist('defaults::foo-1.2.3-4.tar.bz2') + dst = Dist("defaults::foo-1.2.3-4.tar.bz2") a = MatchSpec(dst) b = MatchSpec(a) - c = MatchSpec(dst, optional=True, target='burg') - d = MatchSpec(a, build='5') + c = MatchSpec(dst, optional=True, target="burg") + d = MatchSpec(a, build="5") assert a == b assert hash(a) == hash(b) @@ -342,32 +430,82 @@ def test_dist(self): assert a != d assert hash(a) != hash(d) - p = MatchSpec(channel='defaults',name='python',version=VersionSpec('3.5*')) - assert p.match(Dist(channel='defaults', dist_name='python-3.5.3-1', name='python', - version='3.5.3', build_string='1', build_number=1, base_url=None, - platform=None)) - - assert not p.match(Dist(channel='defaults', dist_name='python-3.6.0-0', name='python', - version='3.6.0', build_string='0', build_number=0, base_url=None, - platform=None)) - - assert p.match(Dist(channel='defaults', dist_name='python-3.5.1-0', name='python', - version='3.5.1', build_string='0', build_number=0, base_url=None, - platform=None)) - assert p.match(PackageRecord(name='python', version='3.5.1', build='0', build_number=0, - depends=('openssl 1.0.2*', 'readline 6.2*', 'sqlite', - 'tk 8.5*', 'xz 5.0.5', 'zlib 1.2*', 'pip'), - channel=Channel(scheme='https', auth=None, - location='repo.anaconda.com', token=None, - name='pkgs/main', platform='osx-64', - package_filename=None), - subdir='osx-64', fn='python-3.5.1-0.tar.bz2', - md5='a813bc0a32691ab3331ac9f37125164c', size=14678857, - priority=0, - url='https://repo.anaconda.com/pkgs/main/osx-64/python-3.5.1-0.tar.bz2')) + p = MatchSpec( + channel="defaults", name="python", version=VersionSpec("3.5*") + ) + assert p.match( + Dist( + channel="defaults", + dist_name="python-3.5.3-1", + name="python", + version="3.5.3", + build_string="1", + build_number=1, + base_url=None, + platform=None, + ) + ) + + assert not p.match( + Dist( + channel="defaults", + dist_name="python-3.6.0-0", + name="python", + version="3.6.0", + build_string="0", + build_number=0, + base_url=None, + platform=None, + ) + ) + + assert p.match( + Dist( + channel="defaults", + dist_name="python-3.5.1-0", + name="python", + version="3.5.1", + build_string="0", + build_number=0, + base_url=None, + platform=None, + ) + ) + assert p.match( + PackageRecord( + name="python", + version="3.5.1", + build="0", + build_number=0, + depends=( + "openssl 1.0.2*", + "readline 6.2*", + 
"sqlite", + "tk 8.5*", + "xz 5.0.5", + "zlib 1.2*", + "pip", + ), + channel=Channel( + scheme="https", + auth=None, + location="repo.anaconda.com", + token=None, + name="pkgs/main", + platform="osx-64", + package_filename=None, + ), + subdir="osx-64", + fn="python-3.5.1-0.tar.bz2", + md5="a813bc0a32691ab3331ac9f37125164c", + size=14678857, + priority=0, + url="https://repo.anaconda.com/pkgs/main/osx-64/python-3.5.1-0.tar.bz2", + ) + ) def test_index_record(self): - dst = Dist('defaults::foo-1.2.3-4.tar.bz2') + dst = Dist("defaults::foo-1.2.3-4.tar.bz2") rec = DPkg(dst) a = MatchSpec(dst) b = MatchSpec(rec) @@ -376,70 +514,70 @@ def test_index_record(self): assert a.match(rec) def test_strictness(self): - assert MatchSpec('foo').strictness == 1 - assert MatchSpec('foo 1.2').strictness == 2 - assert MatchSpec('foo 1.2 3').strictness == 3 - assert MatchSpec('foo 1.2 3 [channel=burg]').strictness == 3 + assert MatchSpec("foo").strictness == 1 + assert MatchSpec("foo 1.2").strictness == 2 + assert MatchSpec("foo 1.2 3").strictness == 3 + assert MatchSpec("foo 1.2 3 [channel=burg]").strictness == 3 # Seems odd, but this is needed for compatibility - assert MatchSpec('test* 1.2').strictness == 3 - assert MatchSpec('foo', build_number=2).strictness == 3 + assert MatchSpec("test* 1.2").strictness == 3 + assert MatchSpec("foo", build_number=2).strictness == 3 def test_build_number_and_filename(self): - ms = MatchSpec('zlib 1.2.7 0') - assert ms.get_exact_value('name') == 'zlib' - assert ms.get_exact_value('version') == '1.2.7' - assert ms.get_exact_value('build') == '0' - assert ms._to_filename_do_not_use() == 'zlib-1.2.7-0.tar.bz2' + ms = MatchSpec("zlib 1.2.7 0") + assert ms.get_exact_value("name") == "zlib" + assert ms.get_exact_value("version") == "1.2.7" + assert ms.get_exact_value("build") == "0" + assert ms._to_filename_do_not_use() == "zlib-1.2.7-0.tar.bz2" def test_openssl_match(self): - dst = Dist('defaults::openssl-1.0.1_-4') - assert MatchSpec('openssl>=1.0.1_').match(DPkg(dst)) - assert not MatchSpec('openssl>=1.0.1').match(DPkg(dst)) + dst = Dist("defaults::openssl-1.0.1_-4") + assert MatchSpec("openssl>=1.0.1_").match(DPkg(dst)) + assert not MatchSpec("openssl>=1.0.1").match(DPkg(dst)) def test_track_features_match(self): - dst = Dist('defaults::foo-1.2.3-4.tar.bz2') - a = MatchSpec(features='test') + dst = Dist("defaults::foo-1.2.3-4.tar.bz2") + a = MatchSpec(features="test") assert str(a) == "*[features=test]" assert not a.match(DPkg(dst)) - assert not a.match(DPkg(dst, track_features='')) + assert not a.match(DPkg(dst, track_features="")) - a = MatchSpec(track_features='test') - assert a.match(DPkg(dst, track_features='test')) - assert not a.match(DPkg(dst, track_features='test2')) - assert not a.match(DPkg(dst, track_features='test me')) - assert not a.match(DPkg(dst, track_features='you test')) - assert not a.match(DPkg(dst, track_features='you test me')) - assert a.get_exact_value('track_features') == frozenset(('test',)) + a = MatchSpec(track_features="test") + assert a.match(DPkg(dst, track_features="test")) + assert not a.match(DPkg(dst, track_features="test2")) + assert not a.match(DPkg(dst, track_features="test me")) + assert not a.match(DPkg(dst, track_features="you test")) + assert not a.match(DPkg(dst, track_features="you test me")) + assert a.get_exact_value("track_features") == frozenset(("test",)) - b = MatchSpec(track_features='mkl') + b = MatchSpec(track_features="mkl") assert not b.match(DPkg(dst)) - assert b.match(DPkg(dst, track_features='mkl')) - assert 
b.match(DPkg(dst, track_features='mkl')) - assert not b.match(DPkg(dst, track_features='mkl debug')) - assert not b.match(DPkg(dst, track_features='debug')) + assert b.match(DPkg(dst, track_features="mkl")) + assert b.match(DPkg(dst, track_features="mkl")) + assert not b.match(DPkg(dst, track_features="mkl debug")) + assert not b.match(DPkg(dst, track_features="debug")) - c = MatchSpec(track_features='nomkl') + c = MatchSpec(track_features="nomkl") assert not c.match(DPkg(dst)) - assert not c.match(DPkg(dst, track_features='mkl')) - assert c.match(DPkg(dst, track_features='nomkl')) - assert not c.match(DPkg(dst, track_features='nomkl debug')) + assert not c.match(DPkg(dst, track_features="mkl")) + assert c.match(DPkg(dst, track_features="nomkl")) + assert not c.match(DPkg(dst, track_features="nomkl debug")) # regression test for #6860 - d = MatchSpec(track_features='') - assert d.get_exact_value('track_features') == frozenset() - d = MatchSpec(track_features=' ') - assert d.get_exact_value('track_features') == frozenset() - d = MatchSpec(track_features=('', '')) - assert d.get_exact_value('track_features') == frozenset() - d = MatchSpec(track_features=('', '', 'test')) - assert d.get_exact_value('track_features') == frozenset(('test',)) + d = MatchSpec(track_features="") + assert d.get_exact_value("track_features") == frozenset() + d = MatchSpec(track_features=" ") + assert d.get_exact_value("track_features") == frozenset() + d = MatchSpec(track_features=("", "")) + assert d.get_exact_value("track_features") == frozenset() + d = MatchSpec(track_features=("", "", "test")) + assert d.get_exact_value("track_features") == frozenset(("test",)) def test_bracket_matches(self): record = { - 'name': 'numpy', - 'version': '1.11.0', - 'build': 'py34_7', - 'build_number': 7, + "name": "numpy", + "version": "1.11.0", + "build": "py34_7", + "build_number": 7, } assert MatchSpec("numpy<2").match(record) @@ -457,12 +595,12 @@ def test_bracket_matches(self): def test_license_match(self): record = { - 'name': 'numpy', - 'version': '1.11.0', - 'build': 'py34_7', - 'build_number': 7, - 'license': 'LGPLv3+', - 'license_family': 'LGPL', + "name": "numpy", + "version": "1.11.0", + "build": "py34_7", + "build_number": 7, + "license": "LGPLv3+", + "license_family": "LGPL", } assert MatchSpec("*[license_family='LGPL']").match(record) assert MatchSpec("*[license_family='lgpl']").match(record) @@ -476,60 +614,67 @@ def test_license_match(self): class TestArg2Spec(TestCase): - def test_simple(self): - assert arg2spec('python') == 'python' - assert arg2spec('python=2.6') == 'python=2.6' - assert arg2spec('python=2.6*') == 'python=2.6' - assert arg2spec('ipython=0.13.2') == 'ipython=0.13.2' - assert arg2spec('ipython=0.13.0') == 'ipython=0.13.0' - assert arg2spec('ipython==0.13.0') == 'ipython==0.13.0' - assert arg2spec('foo=1.3.0=3') == 'foo==1.3.0=3' + assert arg2spec("python") == "python" + assert arg2spec("python=2.6") == "python=2.6" + assert arg2spec("python=2.6*") == "python=2.6" + assert arg2spec("ipython=0.13.2") == "ipython=0.13.2" + assert arg2spec("ipython=0.13.0") == "ipython=0.13.0" + assert arg2spec("ipython==0.13.0") == "ipython==0.13.0" + assert arg2spec("foo=1.3.0=3") == "foo==1.3.0=3" def test_pip_style(self): - assert arg2spec('foo>=1.3') == "foo[version='>=1.3']" - assert arg2spec('zope.int>=1.3,<3.0') == "zope.int[version='>=1.3,<3.0']" - assert arg2spec('numpy >=1.9') == "numpy[version='>=1.9']" + assert arg2spec("foo>=1.3") == "foo[version='>=1.3']" + assert arg2spec("zope.int>=1.3,<3.0") == 
"zope.int[version='>=1.3,<3.0']" + assert arg2spec("numpy >=1.9") == "numpy[version='>=1.9']" def test_invalid_arg2spec(self): with pytest.raises(CondaValueError): - arg2spec('!xyz 1.3') + arg2spec("!xyz 1.3") class TestSpecFromLine(TestCase): - def cb_form(self, spec_str): return MatchSpec(spec_str).conda_build_form() def test_invalid(self): - assert spec_from_line('=') is None - assert spec_from_line('foo 1.0') is None + assert spec_from_line("=") is None + assert spec_from_line("foo 1.0") is None def test_comment(self): - assert spec_from_line('foo # comment') == 'foo' == self.cb_form('foo # comment') - assert spec_from_line('foo ## comment') == 'foo' == self.cb_form('foo ## comment') + assert spec_from_line("foo # comment") == "foo" == self.cb_form("foo # comment") + assert ( + spec_from_line("foo ## comment") == "foo" == self.cb_form("foo ## comment") + ) def test_conda_style(self): - assert spec_from_line('foo') == 'foo' == self.cb_form('foo') - assert spec_from_line('foo=1.0=2') == 'foo 1.0 2' == self.cb_form('foo=1.0=2') + assert spec_from_line("foo") == "foo" == self.cb_form("foo") + assert spec_from_line("foo=1.0=2") == "foo 1.0 2" == self.cb_form("foo=1.0=2") # assert spec_from_line('foo=1.0*') == 'foo 1.0.*' == self.cb_form('foo=1.0*') # assert spec_from_line('foo=1.0|1.2') == 'foo 1.0|1.2' == self.cb_form('foo=1.0|1.2') # assert spec_from_line('foo=1.0') == 'foo 1.0' == self.cb_form('foo=1.0') def test_pip_style(self): - assert spec_from_line('foo>=1.0') == 'foo >=1.0' == self.cb_form('foo>=1.0') - assert spec_from_line('foo >=1.0') == 'foo >=1.0' == self.cb_form('foo >=1.0') - assert spec_from_line('FOO-Bar >=1.0') == 'foo-bar >=1.0' == self.cb_form('FOO-Bar >=1.0') - assert spec_from_line('foo >= 1.0') == 'foo >=1.0' == self.cb_form('foo >= 1.0') - assert spec_from_line('foo > 1.0') == 'foo >1.0' == self.cb_form('foo > 1.0') - assert spec_from_line('foo != 1.0') == 'foo !=1.0' == self.cb_form('foo != 1.0') - assert spec_from_line('foo <1.0') == 'foo <1.0' == self.cb_form('foo <1.0') - assert spec_from_line('foo >=1.0 , < 2.0') == 'foo >=1.0,<2.0' == self.cb_form('foo >=1.0 , < 2.0') + assert spec_from_line("foo>=1.0") == "foo >=1.0" == self.cb_form("foo>=1.0") + assert spec_from_line("foo >=1.0") == "foo >=1.0" == self.cb_form("foo >=1.0") + assert ( + spec_from_line("FOO-Bar >=1.0") + == "foo-bar >=1.0" + == self.cb_form("FOO-Bar >=1.0") + ) + assert spec_from_line("foo >= 1.0") == "foo >=1.0" == self.cb_form("foo >= 1.0") + assert spec_from_line("foo > 1.0") == "foo >1.0" == self.cb_form("foo > 1.0") + assert spec_from_line("foo != 1.0") == "foo !=1.0" == self.cb_form("foo != 1.0") + assert spec_from_line("foo <1.0") == "foo <1.0" == self.cb_form("foo <1.0") + assert ( + spec_from_line("foo >=1.0 , < 2.0") + == "foo >=1.0,<2.0" + == self.cb_form("foo >=1.0 , < 2.0") + ) class SpecStrParsingTests(TestCase): - def test_parse_spec_str_tarball_url(self): with env_unmodified(conda_tests_ctxt_mgmt_def_pol): url = "https://repo.anaconda.com/pkgs/main/linux-64/_license-1.1-py27_1.tar.bz2" @@ -645,15 +790,19 @@ def test_parse_spec_str_with_brackets(self): "version": "1.8", "build": "py27_0", } - assert _parse_spec_str("defaults::numpy=1.8=py27_0 [channel=anaconda,version=1.9, build=3]") == { + assert _parse_spec_str( + "defaults::numpy=1.8=py27_0 [channel=anaconda,version=1.9, build=3]" + ) == { "_original_spec_str": "defaults::numpy=1.8=py27_0 [channel=anaconda,version=1.9, build=3]", "channel": "anaconda", "name": "numpy", "version": "1.9", "build": "3", } - assert 
_parse_spec_str('defaults::numpy=1.8=py27_0 [channel=\'anaconda\',version=">=1.8,<2|1.9", build=\'3\']') == { - "_original_spec_str": 'defaults::numpy=1.8=py27_0 [channel=\'anaconda\',version=">=1.8,<2|1.9", build=\'3\']', + assert _parse_spec_str( + "defaults::numpy=1.8=py27_0 [channel='anaconda',version=\">=1.8,<2|1.9\", build='3']" + ) == { + "_original_spec_str": "defaults::numpy=1.8=py27_0 [channel='anaconda',version=\">=1.8,<2|1.9\", build='3']", "channel": "anaconda", "name": "numpy", "version": ">=1.8,<2|1.9", @@ -778,7 +927,7 @@ def test_parse_hard(self): def test_parse_errors(self): with pytest.raises(InvalidMatchSpec): - _parse_spec_str('!xyz 1.3') + _parse_spec_str("!xyz 1.3") def test_parse_channel_subdir(self): assert _parse_spec_str("conda-forge::foo>=1.0") == { @@ -818,28 +967,28 @@ def test_parse_build_number_brackets(self): assert _parse_spec_str("python[build_number=3]") == { "_original_spec_str": "python[build_number=3]", "name": "python", - "build_number": '3', + "build_number": "3", } assert _parse_spec_str("python[build_number='>3']") == { "_original_spec_str": "python[build_number='>3']", "name": "python", - "build_number": '>3', + "build_number": ">3", } assert _parse_spec_str("python[build_number='>=3']") == { "_original_spec_str": "python[build_number='>=3']", "name": "python", - "build_number": '>=3', + "build_number": ">=3", } assert _parse_spec_str("python[build_number='<3']") == { "_original_spec_str": "python[build_number='<3']", "name": "python", - "build_number": '<3', + "build_number": "<3", } assert _parse_spec_str("python[build_number='<=3']") == { "_original_spec_str": "python[build_number='<=3']", "name": "python", - "build_number": '<=3', + "build_number": "<=3", } # # these don't work right now, should they? 
@@ -864,13 +1013,17 @@ def test_parse_build_number_brackets(self): def test_dist_str(self): for ext in (CONDA_PACKAGE_EXTENSION_V1, CONDA_PACKAGE_EXTENSION_V2): - m1 = MatchSpec.from_dist_str(f"anaconda/{context.subdir}::python-3.6.6-0{ext}") + m1 = MatchSpec.from_dist_str( + f"anaconda/{context.subdir}::python-3.6.6-0{ext}" + ) m2 = MatchSpec.from_dist_str(f"anaconda/{context.subdir}::python-3.6.6-0") m3 = MatchSpec.from_dist_str( f"https://someurl.org/anaconda/{context.subdir}::python-3.6.6-0{ext}" ) m4 = MatchSpec.from_dist_str(f"python-3.6.6-0{ext}") - m5 = MatchSpec.from_dist_str(f"https://someurl.org/anaconda::python-3.6.6-0{ext}") + m5 = MatchSpec.from_dist_str( + f"https://someurl.org/anaconda::python-3.6.6-0{ext}" + ) pref = DPkg(f"anaconda::python-3.6.6-0{ext}") pref.url = f"https://someurl.org/anaconda/{context.subdir}" @@ -895,140 +1048,197 @@ def test_dist_str(self): class MatchSpecMergeTests(TestCase): - def test_merge_single_name(self): - specs = (MatchSpec('exact'), MatchSpec('exact 1.2.3 1'), MatchSpec('exact >1.0,<2')) + specs = ( + MatchSpec("exact"), + MatchSpec("exact 1.2.3 1"), + MatchSpec("exact >1.0,<2"), + ) merged_specs = MatchSpec.merge(specs) print(merged_specs) assert len(merged_specs) == 1 merged_spec = merged_specs[0] print(merged_spec) assert str(merged_spec) == "exact[version='1.2.3,>1.0,<2',build=1]" - assert merged_spec.match({ - 'name': 'exact', - 'version': '1.2.3', - 'build': '1', - 'build_number': 1, - }) - assert not merged_spec.match({ - 'name': 'exact', - 'version': '1.2.2', - 'build': '1', - 'build_number': 1, - }) - - specs = (MatchSpec('exact 1.2.3 1'), MatchSpec('exact 1.2.3 2')) + assert merged_spec.match( + { + "name": "exact", + "version": "1.2.3", + "build": "1", + "build_number": 1, + } + ) + assert not merged_spec.match( + { + "name": "exact", + "version": "1.2.2", + "build": "1", + "build_number": 1, + } + ) + + specs = (MatchSpec("exact 1.2.3 1"), MatchSpec("exact 1.2.3 2")) with pytest.raises(ValueError): MatchSpec.merge(specs) - merged_specs = MatchSpec.merge((MatchSpec('exact 1.2.3 1'),)) + merged_specs = MatchSpec.merge((MatchSpec("exact 1.2.3 1"),)) assert len(merged_specs) == 1 assert str(merged_specs[0]) == "exact==1.2.3=1" def test_merge_multiple_name(self): - specs = tuple(MatchSpec(s) for s in ( - 'exact', 'exact 1.2.3 1', - 'bounded >=1.0,<2.0', 'bounded >=1.5', 'bounded <=1.8', - 'exact >1.0,<2', - )) + specs = tuple( + MatchSpec(s) + for s in ( + "exact", + "exact 1.2.3 1", + "bounded >=1.0,<2.0", + "bounded >=1.5", + "bounded <=1.8", + "exact >1.0,<2", + ) + ) merged_specs = MatchSpec.merge(specs) print(merged_specs) assert len(merged_specs) == 2 - exact_spec = next(s for s in merged_specs if s.name == 'exact') - bounded_spec = next(s for s in merged_specs if s.name == 'bounded') + exact_spec = next(s for s in merged_specs if s.name == "exact") + bounded_spec = next(s for s in merged_specs if s.name == "bounded") assert str(exact_spec) == "exact[version='1.2.3,>1.0,<2',build=1]" assert str(bounded_spec) == "bounded[version='<=1.8,>=1.0,<2.0,>=1.5']" - assert not bounded_spec.match({ - 'name': 'bounded', - 'version': '1', - 'build': '6', - 'build_number': 6, - }) - assert bounded_spec.match({ - 'name': 'bounded', - 'version': '1.5', - 'build': '7', - 'build_number': 7, - }) - assert not bounded_spec.match({ - 'name': 'bounded', - 'version': '2', - 'build': '8', - 'build_number': 8, - }) + assert not bounded_spec.match( + { + "name": "bounded", + "version": "1", + "build": "6", + "build_number": 6, + } + ) + assert 
bounded_spec.match( + { + "name": "bounded", + "version": "1.5", + "build": "7", + "build_number": 7, + } + ) + assert not bounded_spec.match( + { + "name": "bounded", + "version": "2", + "build": "8", + "build_number": 8, + } + ) def test_channel_merge(self): - specs = (MatchSpec('pkgs/main::python'), MatchSpec('defaults::python')) + specs = (MatchSpec("pkgs/main::python"), MatchSpec("defaults::python")) with pytest.raises(ValueError): MatchSpec.merge(specs) - specs = (MatchSpec('defaults::python'), MatchSpec('pkgs/main::python')) + specs = (MatchSpec("defaults::python"), MatchSpec("pkgs/main::python")) with pytest.raises(ValueError): MatchSpec.merge(specs) - specs = (MatchSpec('defaults::python'), MatchSpec('defaults::python 1.2.3')) + specs = (MatchSpec("defaults::python"), MatchSpec("defaults::python 1.2.3")) merged = MatchSpec.merge(specs) assert len(merged) == 1 assert str(merged[0]) == "defaults::python==1.2.3" - specs = (MatchSpec('pkgs/free::python'), MatchSpec('pkgs/free::python 1.2.3')) + specs = (MatchSpec("pkgs/free::python"), MatchSpec("pkgs/free::python 1.2.3")) merged = MatchSpec.merge(specs) assert len(merged) == 1 assert str(merged[0]) == "pkgs/free::python==1.2.3" def test_subdir_merge(self): - specs = (MatchSpec('pkgs/main/linux-64::python'), MatchSpec('pkgs/main/linux-32::python')) + specs = ( + MatchSpec("pkgs/main/linux-64::python"), + MatchSpec("pkgs/main/linux-32::python"), + ) with pytest.raises(ValueError): MatchSpec.merge(specs) - specs = (MatchSpec('defaults/win-32::python'), MatchSpec('defaults/win-64::python')) + specs = ( + MatchSpec("defaults/win-32::python"), + MatchSpec("defaults/win-64::python"), + ) with pytest.raises(ValueError): MatchSpec.merge(specs) - specs = (MatchSpec('pkgs/free/linux-64::python'), MatchSpec('pkgs/free::python 1.2.3')) + specs = ( + MatchSpec("pkgs/free/linux-64::python"), + MatchSpec("pkgs/free::python 1.2.3"), + ) merged = MatchSpec.merge(specs) assert len(merged) == 1 assert str(merged[0]) == "pkgs/free/linux-64::python==1.2.3" - assert merged[0] == MatchSpec(channel='pkgs/free', subdir='linux-64', name='python', version='1.2.3') + assert merged[0] == MatchSpec( + channel="pkgs/free", subdir="linux-64", name="python", version="1.2.3" + ) def test_build_merge(self): - specs = (MatchSpec('python[build=py27_1]'), MatchSpec('python=1.2.3=py27_1'), MatchSpec('conda-forge::python<=8')) + specs = ( + MatchSpec("python[build=py27_1]"), + MatchSpec("python=1.2.3=py27_1"), + MatchSpec("conda-forge::python<=8"), + ) merged = MatchSpec.merge(specs) assert len(merged) == 1 assert str(merged[0]) == "conda-forge::python[version='1.2.3,<=8',build=py27_1]" - specs = (MatchSpec('python[build=py27_1]'), MatchSpec('python=1.2.3=1'), MatchSpec('conda-forge::python<=8[build=py27_1]')) + specs = ( + MatchSpec("python[build=py27_1]"), + MatchSpec("python=1.2.3=1"), + MatchSpec("conda-forge::python<=8[build=py27_1]"), + ) with pytest.raises(ValueError): MatchSpec.merge(specs) def test_build_number_merge(self): - specs = (MatchSpec('python[build_number=1]'), MatchSpec('python=1.2.3=py27_7'), MatchSpec('conda-forge::python<=8[build_number=1]')) + specs = ( + MatchSpec("python[build_number=1]"), + MatchSpec("python=1.2.3=py27_7"), + MatchSpec("conda-forge::python<=8[build_number=1]"), + ) merged = MatchSpec.merge(specs) assert len(merged) == 1 - assert str(merged[0]) == "conda-forge::python[version='1.2.3,<=8',build=py27_7,build_number=1]" + assert ( + str(merged[0]) + == "conda-forge::python[version='1.2.3,<=8',build=py27_7,build_number=1]" + ) - specs = 
(MatchSpec('python[build_number=2]'), MatchSpec('python=1.2.3=py27_7'), MatchSpec('python<=8[build_number=1]')) + specs = ( + MatchSpec("python[build_number=2]"), + MatchSpec("python=1.2.3=py27_7"), + MatchSpec("python<=8[build_number=1]"), + ) with pytest.raises(ValueError): MatchSpec.merge(specs) def test_md5_merge_with_name(self): - specs = (MatchSpec('python[md5=deadbeef]'), MatchSpec('python=1.2.3'), MatchSpec('conda-forge::python[md5=deadbeef]')) + specs = ( + MatchSpec("python[md5=deadbeef]"), + MatchSpec("python=1.2.3"), + MatchSpec("conda-forge::python[md5=deadbeef]"), + ) merged = MatchSpec.merge(specs) assert len(merged) == 1 assert str(merged[0]) == "conda-forge::python=1.2.3[md5=deadbeef]" - specs = (MatchSpec('python[md5=FFBADD11]'), MatchSpec('python=1.2.3'), MatchSpec('python[md5=ffbadd11]')) + specs = ( + MatchSpec("python[md5=FFBADD11]"), + MatchSpec("python=1.2.3"), + MatchSpec("python[md5=ffbadd11]"), + ) with pytest.raises(ValueError): MatchSpec.merge(specs) def test_md5_merge_wo_name(self): - specs = (MatchSpec('*[md5=deadbeef]'), MatchSpec('*[md5=FFBADD11]')) + specs = (MatchSpec("*[md5=deadbeef]"), MatchSpec("*[md5=FFBADD11]")) merged = MatchSpec.merge(specs) assert len(merged) == 2 - str_specs = ('*[md5=deadbeef]', '*[md5=FFBADD11]') + str_specs = ("*[md5=deadbeef]", "*[md5=FFBADD11]") assert str(merged[0]) in str_specs assert str(merged[1]) in str_specs assert str(merged[0]) != str(merged[1]) diff --git a/tests/models/test_package_info.py b/tests/models/test_package_info.py index 258d940e4c4..a1bf0136123 100644 --- a/tests/models/test_package_info.py +++ b/tests/models/test_package_info.py @@ -1,40 +1,51 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from unittest import TestCase from conda.base.context import context from conda.models.channel import Channel from conda.models.enums import FileMode, PathType -from conda.models.records import PackageRecord, PathData, PathsData from conda.models.package_info import Noarch, PackageInfo, PackageMetadata +from conda.models.records import PackageRecord, PathData, PathsData class DefaultPackageInfo(TestCase): def test_package_info(self): - index_json_record = PackageRecord(build=0, build_number=0, name="test_foo", version=0, - channel='defaults', subdir=context.subdir, fn='doesnt-matter', - md5='0123456789') + index_json_record = PackageRecord( + build=0, + build_number=0, + name="test_foo", + version=0, + channel="defaults", + subdir=context.subdir, + fn="doesnt-matter", + md5="0123456789", + ) icondata = "icondata" package_metadata = PackageMetadata( package_metadata_version=1, noarch=Noarch(type="python", entry_points=["test:foo"]), ) - paths = [PathData(_path="test/path/1", file_mode=FileMode.text, path_type=PathType.hardlink, - prefix_placeholder="/opt/anaconda1anaconda2anaconda3", ), - PathData(_path="test/path/2", no_link=True, path_type=PathType.hardlink), - PathData(_path="test/path/3", path_type=PathType.softlink), - PathData(_path="menu/test.json", path_type=PathType.hardlink)] + paths = [ + PathData( + _path="test/path/1", + file_mode=FileMode.text, + path_type=PathType.hardlink, + prefix_placeholder="/opt/anaconda1anaconda2anaconda3", + ), + PathData(_path="test/path/2", no_link=True, path_type=PathType.hardlink), + PathData(_path="test/path/3", path_type=PathType.softlink), + PathData(_path="menu/test.json", path_type=PathType.hardlink), + ] paths_data = PathsData(paths_version=0, paths=paths) package_info = PackageInfo( - extracted_package_dir='/some/path', + 
extracted_package_dir="/some/path", package_tarball_full_path="/some/path.tar.bz2", - channel=Channel('defaults'), + channel=Channel("defaults"), repodata_record=index_json_record, - url='https://some.com/place/path.tar.bz2', - + url="https://some.com/place/path.tar.bz2", index_json_record=index_json_record, icondata=icondata, package_metadata=package_metadata, diff --git a/tests/models/test_prefix_graph.py b/tests/models/test_prefix_graph.py index 64fd7c8d499..5d9fd028261 100644 --- a/tests/models/test_prefix_graph.py +++ b/tests/models/test_prefix_graph.py @@ -1,26 +1,28 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import warnings - from functools import lru_cache from pprint import pprint +import pytest + +import conda.models.prefix_graph from conda.base.context import conda_tests_ctxt_mgmt_def_pol from conda.common.io import env_var from conda.exceptions import CyclicalDependencyError from conda.models.match_spec import MatchSpec -import conda.models.prefix_graph -from conda.models.prefix_graph import PrefixGraph, GeneralGraph +from conda.models.prefix_graph import GeneralGraph, PrefixGraph from conda.models.records import PackageRecord from conda.testing.helpers import add_subdir_to_iter, get_solver_4, get_solver_5 -import pytest - @lru_cache(maxsize=None) def get_conda_build_record_set(tmpdir): - specs = MatchSpec("conda"), MatchSpec("conda-build"), MatchSpec("intel-openmp"), + specs = ( + MatchSpec("conda"), + MatchSpec("conda-build"), + MatchSpec("intel-openmp"), + ) with get_solver_4(tmpdir, specs) as solver: final_state = solver.solve_final_state() return final_state, frozenset(specs) @@ -36,8 +38,13 @@ def get_pandas_record_set(tmpdir): @lru_cache(maxsize=None) def get_windows_conda_build_record_set(tmpdir): - specs = (MatchSpec("conda"), MatchSpec("conda-build"), MatchSpec("affine"), - MatchSpec("colour"), MatchSpec("uses-spiffy-test-app"),) + specs = ( + MatchSpec("conda"), + MatchSpec("conda-build"), + MatchSpec("affine"), + MatchSpec("colour"), + MatchSpec("uses-spiffy-test-app"), + ) with get_solver_5(tmpdir, specs) as solver: final_state = solver.solve_final_state() return final_state, frozenset(specs) @@ -46,7 +53,10 @@ def get_windows_conda_build_record_set(tmpdir): @lru_cache(maxsize=None) def get_sqlite_cyclical_record_set(tmpdir): # sqlite-3.20.1-haaaaaaa_4 - specs = MatchSpec("sqlite=3.20.1[build_number=4]"), MatchSpec("flask"), + specs = ( + MatchSpec("sqlite=3.20.1[build_number=4]"), + MatchSpec("flask"), + ) with get_solver_4(tmpdir, specs) as solver: final_state = solver.solve_final_state() return final_state, frozenset(specs) @@ -67,69 +77,69 @@ def test_prefix_graph_1(tmpdir): nodes = tuple(rec.name for rec in graph.records) pprint(nodes) order = ( - 'intel-openmp', - 'ca-certificates', - 'conda-env', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'patchelf', - 'tk', - 'xz', - 'yaml', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pysocks', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'cffi', - 'setuptools', - 'cryptography', - 'jinja2', - 'pyopenssl', - 'urllib3', - 'requests', - 'conda', - 'conda-build', + "intel-openmp", + "ca-certificates", + "conda-env", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "patchelf", + "tk", + "xz", + "yaml", + "zlib", + "libedit", + 
"readline", + "sqlite", + "python", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pysocks", + "pyyaml", + "ruamel_yaml", + "six", + "cffi", + "setuptools", + "cryptography", + "jinja2", + "pyopenssl", + "urllib3", + "requests", + "conda", + "conda-build", ) assert nodes == order - python_node = graph.get_node_by_name('python') + python_node = graph.get_node_by_name("python") python_ancestors = graph.all_ancestors(python_node) nodes = tuple(rec.name for rec in python_ancestors) pprint(nodes) order = ( - 'ca-certificates', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'tk', - 'xz', - 'zlib', - 'libedit', - 'readline', - 'sqlite', + "ca-certificates", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "tk", + "xz", + "zlib", + "libedit", + "readline", + "sqlite", ) assert nodes == order @@ -137,32 +147,32 @@ def test_prefix_graph_1(tmpdir): nodes = tuple(rec.name for rec in python_descendants) pprint(nodes) order = ( - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pysocks', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'cffi', - 'setuptools', - 'cryptography', - 'jinja2', - 'pyopenssl', - 'urllib3', - 'requests', - 'conda', - 'conda-build', + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pysocks", + "pyyaml", + "ruamel_yaml", + "six", + "cffi", + "setuptools", + "cryptography", + "jinja2", + "pyopenssl", + "urllib3", + "requests", + "conda", + "conda-build", ) assert nodes == order @@ -171,9 +181,9 @@ def test_prefix_graph_1(tmpdir): nodes = tuple(rec.name for rec in removed_nodes) pprint(nodes) order = ( - 'requests', - 'conda', - 'conda-build', + "requests", + "conda", + "conda-build", ) assert nodes == order @@ -183,46 +193,46 @@ def test_prefix_graph_1(tmpdir): nodes = tuple(rec.name for rec in graph.records) pprint(nodes) order = ( - 'conda-env', - 'intel-openmp', - 'ca-certificates', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'patchelf', - 'tk', - 'xz', - 'yaml', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pysocks', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'cffi', - 'setuptools', - 'cryptography', - 'jinja2', - 'pyopenssl', - 'urllib3', + "conda-env", + "intel-openmp", + "ca-certificates", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "patchelf", + "tk", + "xz", + "yaml", + "zlib", + "libedit", + "readline", + "sqlite", + "python", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pysocks", + "pyyaml", + "ruamel_yaml", + "six", + "cffi", + "setuptools", + "cryptography", + "jinja2", + "pyopenssl", + "urllib3", ) assert nodes == order @@ -232,7 +242,8 @@ def test_prefix_graph_1(tmpdir): } ) assert { - node.dist_str(): {str(ms) for ms in specs} for node, specs in graph.spec_matches.items() + 
node.dist_str(): {str(ms) for ms in specs} + for node, specs in graph.spec_matches.items() } == spec_matches removed_nodes = graph.prune() @@ -243,45 +254,45 @@ def test_prefix_graph_1(tmpdir): removed_nodes = tuple(rec.name for rec in removed_nodes) order = ( - 'conda-env', - 'ca-certificates', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'patchelf', - 'tk', - 'xz', - 'yaml', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pysocks', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'cffi', - 'setuptools', - 'cryptography', - 'jinja2', - 'pyopenssl', - 'urllib3', + "conda-env", + "ca-certificates", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "patchelf", + "tk", + "xz", + "yaml", + "zlib", + "libedit", + "readline", + "sqlite", + "python", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pysocks", + "pyyaml", + "ruamel_yaml", + "six", + "cffi", + "setuptools", + "cryptography", + "jinja2", + "pyopenssl", + "urllib3", ) pprint(removed_nodes) assert removed_nodes == order @@ -291,55 +302,55 @@ def test_prefix_graph_2(tmpdir): records, specs = get_conda_build_record_set(tmpdir) graph = PrefixGraph(records, specs) - conda_build_node = graph.get_node_by_name('conda-build') + conda_build_node = graph.get_node_by_name("conda-build") del graph.spec_matches[conda_build_node] nodes = tuple(rec.name for rec in graph.records) pprint(nodes) order = ( - 'intel-openmp', - 'ca-certificates', - 'conda-env', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'patchelf', - 'tk', - 'xz', - 'yaml', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pysocks', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'cffi', - 'setuptools', - 'cryptography', - 'jinja2', - 'pyopenssl', - 'urllib3', - 'requests', - 'conda', - 'conda-build', + "intel-openmp", + "ca-certificates", + "conda-env", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "patchelf", + "tk", + "xz", + "yaml", + "zlib", + "libedit", + "readline", + "sqlite", + "python", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pysocks", + "pyyaml", + "ruamel_yaml", + "six", + "cffi", + "setuptools", + "cryptography", + "jinja2", + "pyopenssl", + "urllib3", + "requests", + "conda", + "conda-build", ) assert nodes == order @@ -347,53 +358,53 @@ def test_prefix_graph_2(tmpdir): remaining_nodes = tuple(rec.name for rec in graph.records) pprint(remaining_nodes) order = ( - 'intel-openmp', - 'ca-certificates', - 'conda-env', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'tk', - 'xz', - 'yaml', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'idna', - 'pycosat', - 'pycparser', - 'pysocks', - 'ruamel_yaml', - 'six', - 'cffi', - 'cryptography', - 'pyopenssl', - 'urllib3', - 'requests', - 
'conda', + "intel-openmp", + "ca-certificates", + "conda-env", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "tk", + "xz", + "yaml", + "zlib", + "libedit", + "readline", + "sqlite", + "python", + "asn1crypto", + "certifi", + "chardet", + "cryptography-vectors", + "idna", + "pycosat", + "pycparser", + "pysocks", + "ruamel_yaml", + "six", + "cffi", + "cryptography", + "pyopenssl", + "urllib3", + "requests", + "conda", ) assert remaining_nodes == order order = ( - 'patchelf', - 'beautifulsoup4', - 'filelock', - 'glob2', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pyyaml', - 'setuptools', - 'jinja2', - 'conda-build', + "patchelf", + "beautifulsoup4", + "filelock", + "glob2", + "markupsafe", + "pkginfo", + "psutil", + "pyyaml", + "setuptools", + "jinja2", + "conda-build", ) removed_nodes = tuple(rec.name for rec in removed_nodes) pprint(removed_nodes) @@ -409,53 +420,53 @@ def test_remove_youngest_descendant_nodes_with_specs(tmpdir): remaining_nodes = tuple(rec.name for rec in graph.records) pprint(remaining_nodes) order = ( - 'ca-certificates', - 'conda-env', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'patchelf', - 'tk', - 'xz', - 'yaml', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pysocks', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'cffi', - 'setuptools', - 'cryptography', - 'jinja2', - 'pyopenssl', - 'urllib3', - 'requests', - 'conda', + "ca-certificates", + "conda-env", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "patchelf", + "tk", + "xz", + "yaml", + "zlib", + "libedit", + "readline", + "sqlite", + "python", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pysocks", + "pyyaml", + "ruamel_yaml", + "six", + "cffi", + "setuptools", + "cryptography", + "jinja2", + "pyopenssl", + "urllib3", + "requests", + "conda", ) assert remaining_nodes == order order = ( - 'intel-openmp', - 'conda-build', + "intel-openmp", + "conda-build", ) removed_nodes = tuple(rec.name for rec in removed_nodes) pprint(removed_nodes) @@ -467,52 +478,50 @@ def test_remove_youngest_descendant_nodes_with_specs(tmpdir): remaining_nodes = tuple(rec.name for rec in graph.records) pprint(remaining_nodes) order = ( - 'conda-env', - 'ca-certificates', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'patchelf', - 'tk', - 'xz', - 'yaml', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pysocks', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'cffi', - 'setuptools', - 'cryptography', - 'jinja2', - 'pyopenssl', - 'urllib3', - 'requests', + "conda-env", + "ca-certificates", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "patchelf", + "tk", + "xz", + "yaml", + "zlib", + "libedit", + "readline", + "sqlite", + "python", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pysocks", + "pyyaml", + "ruamel_yaml", + "six", 
+ "cffi", + "setuptools", + "cryptography", + "jinja2", + "pyopenssl", + "urllib3", + "requests", ) assert remaining_nodes == order - order = ( - 'conda', - ) + order = ("conda",) removed_nodes = tuple(rec.name for rec in removed_nodes) pprint(removed_nodes) assert removed_nodes == order @@ -523,50 +532,50 @@ def test_remove_youngest_descendant_nodes_with_specs(tmpdir): remaining_nodes = tuple(rec.name for rec in graph.records) pprint(remaining_nodes) order = ( - 'ca-certificates', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'tk', - 'xz', - 'zlib', - 'libedit', - 'readline', - 'sqlite', - 'python', - 'asn1crypto', - 'certifi', - 'chardet', - 'cryptography-vectors', - 'idna', - 'pycparser', - 'pysocks', - 'six', - 'cffi', - 'cryptography', - 'pyopenssl', - 'urllib3', - 'requests', + "ca-certificates", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "tk", + "xz", + "zlib", + "libedit", + "readline", + "sqlite", + "python", + "asn1crypto", + "certifi", + "chardet", + "cryptography-vectors", + "idna", + "pycparser", + "pysocks", + "six", + "cffi", + "cryptography", + "pyopenssl", + "urllib3", + "requests", ) assert remaining_nodes == order order = ( - 'conda-env', - 'patchelf', - 'yaml', - 'beautifulsoup4', - 'filelock', - 'glob2', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pyyaml', - 'ruamel_yaml', - 'setuptools', - 'jinja2', + "conda-env", + "patchelf", + "yaml", + "beautifulsoup4", + "filelock", + "glob2", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pyyaml", + "ruamel_yaml", + "setuptools", + "jinja2", ) removed_nodes = tuple(rec.name for rec in removed_nodes) pprint(removed_nodes) @@ -586,49 +595,49 @@ def test_windows_sort_orders_1(tmpdir): nodes = tuple(rec.name for rec in graph.records) pprint(nodes) order = ( - 'ca-certificates', - 'conda-env', - 'vs2015_runtime', - 'vc', - 'openssl', - 'python', - 'yaml', - 'pywin32', - 'menuinst', # on_win, menuinst should be very early - 'affine', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'colour', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'win_inet_pton', - 'wincertstore', - 'cffi', - 'pysocks', - 'setuptools', - 'cryptography', - 'jinja2', - 'wheel', - 'pip', # pip always comes after python - 'pyopenssl', - 'urllib3', - 'requests', - 'conda', # on_win, conda comes before all noarch: python packages (affine, colour, spiffy-test-app, uses-spiffy-test-app) - 'conda-build', - 'spiffy-test-app', - 'uses-spiffy-test-app', + "ca-certificates", + "conda-env", + "vs2015_runtime", + "vc", + "openssl", + "python", + "yaml", + "pywin32", + "menuinst", # on_win, menuinst should be very early + "affine", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "colour", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pyyaml", + "ruamel_yaml", + "six", + "win_inet_pton", + "wincertstore", + "cffi", + "pysocks", + "setuptools", + "cryptography", + "jinja2", + "wheel", + "pip", # pip always comes after python + "pyopenssl", + "urllib3", + "requests", + "conda", # on_win, conda comes before all noarch: python packages (affine, colour, spiffy-test-app, uses-spiffy-test-app) + "conda-build", + "spiffy-test-app", + "uses-spiffy-test-app", ) assert nodes == order finally: @@ -639,64 +648,66 @@ def test_windows_sort_orders_2(tmpdir): # This test makes 
sure the windows-specific parts of _toposort_prepare_graph # are behaving correctly. - with env_var('CONDA_ALLOW_CYCLES', 'false', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_ALLOW_CYCLES", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): old_on_win = conda.models.prefix_graph.on_win conda.models.prefix_graph.on_win = False try: records, specs = get_windows_conda_build_record_set(tmpdir) graph = PrefixGraph(records, specs) - python_node = graph.get_node_by_name('python') - pip_node = graph.get_node_by_name('pip') + python_node = graph.get_node_by_name("python") + pip_node = graph.get_node_by_name("pip") assert pip_node in graph.graph[python_node] assert python_node in graph.graph[pip_node] nodes = tuple(rec.name for rec in graph.records) pprint(nodes) order = ( - 'ca-certificates', - 'conda-env', - 'vs2015_runtime', - 'vc', - 'openssl', - 'python', - 'yaml', - 'affine', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'colour', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'pywin32', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'spiffy-test-app', - 'win_inet_pton', - 'wincertstore', - 'cffi', - 'menuinst', # not on_win, menuinst isn't changed - 'pysocks', - 'setuptools', - 'uses-spiffy-test-app', - 'cryptography', - 'jinja2', - 'wheel', - 'pip', # pip always comes after python - 'pyopenssl', - 'urllib3', - 'requests', - 'conda', # not on_win, no special treatment for noarch: python packages (affine, colour, spiffy-test-app, uses-spiffy-test-app) - 'conda-build', + "ca-certificates", + "conda-env", + "vs2015_runtime", + "vc", + "openssl", + "python", + "yaml", + "affine", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "colour", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "pywin32", + "pyyaml", + "ruamel_yaml", + "six", + "spiffy-test-app", + "win_inet_pton", + "wincertstore", + "cffi", + "menuinst", # not on_win, menuinst isn't changed + "pysocks", + "setuptools", + "uses-spiffy-test-app", + "cryptography", + "jinja2", + "wheel", + "pip", # pip always comes after python + "pyopenssl", + "urllib3", + "requests", + "conda", # not on_win, no special treatment for noarch: python packages (affine, colour, spiffy-test-app, uses-spiffy-test-app) + "conda-build", ) assert nodes == order finally: @@ -707,65 +718,71 @@ def test_sort_without_prep(tmpdir, mocker): # Test the _toposort_prepare_graph method, here by not running it at all. # The method is invoked in every other test. This is what happens when it's not invoked. 
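The mocking idiom in the hunk that follows is the crux of this test: patching PrefixGraph._toposort_prepare_graph to return None turns the preparation hook into a no-op, so the topological sort operates on the raw, unprepared graph. A minimal self-contained sketch of the same technique using the standard library's unittest.mock (the Graph class below is hypothetical, for illustration only, not conda's API):

from unittest import mock


class Graph:
    def __init__(self):
        self.prepared = False
        self._prepare()  # hook that normally adjusts the graph before sorting

    def _prepare(self):
        self.prepared = True


# Patching the hook to a no-op means construction skips the preparation step.
with mock.patch.object(Graph, "_prepare", return_value=None):
    g = Graph()
    assert g.prepared is False  # the real _prepare never ran

assert Graph().prepared is True  # outside the patch, the original is restored

Because the patch is scoped to the with block, the test can later rebuild the graph unpatched and observe the normal (prepared) behavior.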
- with mocker.patch.object(conda.models.prefix_graph.PrefixGraph, '_toposort_prepare_graph', return_value=None): + with mocker.patch.object( + conda.models.prefix_graph.PrefixGraph, + "_toposort_prepare_graph", + return_value=None, + ): records, specs = get_windows_conda_build_record_set(tmpdir) graph = PrefixGraph(records, specs) - python_node = graph.get_node_by_name('python') - pip_node = graph.get_node_by_name('pip') + python_node = graph.get_node_by_name("python") + pip_node = graph.get_node_by_name("pip") assert pip_node in graph.graph[python_node] assert python_node in graph.graph[pip_node] nodes = tuple(rec.name for rec in graph.records) pprint(nodes) order = ( - 'ca-certificates', - 'conda-env', - 'vs2015_runtime', - 'vc', - 'openssl', - 'yaml', - 'affine', - 'asn1crypto', - 'beautifulsoup4', - 'certifi', - 'chardet', - 'colour', - 'cryptography-vectors', - 'filelock', - 'glob2', - 'idna', - 'markupsafe', - 'pkginfo', - 'psutil', - 'pycosat', - 'pycparser', - 'cffi', - 'python', - 'pywin32', - 'pyyaml', - 'ruamel_yaml', - 'six', - 'spiffy-test-app', - 'win_inet_pton', - 'wincertstore', - 'cryptography', - 'menuinst', - 'pysocks', - 'setuptools', - 'uses-spiffy-test-app', - 'jinja2', - 'pyopenssl', - 'wheel', - 'pip', - 'urllib3', - 'requests', - 'conda', - 'conda-build', + "ca-certificates", + "conda-env", + "vs2015_runtime", + "vc", + "openssl", + "yaml", + "affine", + "asn1crypto", + "beautifulsoup4", + "certifi", + "chardet", + "colour", + "cryptography-vectors", + "filelock", + "glob2", + "idna", + "markupsafe", + "pkginfo", + "psutil", + "pycosat", + "pycparser", + "cffi", + "python", + "pywin32", + "pyyaml", + "ruamel_yaml", + "six", + "spiffy-test-app", + "win_inet_pton", + "wincertstore", + "cryptography", + "menuinst", + "pysocks", + "setuptools", + "uses-spiffy-test-app", + "jinja2", + "pyopenssl", + "wheel", + "pip", + "urllib3", + "requests", + "conda", + "conda-build", ) assert nodes == order - with env_var('CONDA_ALLOW_CYCLES', 'false', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_ALLOW_CYCLES", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): records, specs = get_windows_conda_build_record_set(tmpdir) with pytest.raises(CyclicalDependencyError): graph = PrefixGraph(records, specs) @@ -798,43 +815,43 @@ def test_deep_cyclical_dependency(tmpdir): nodes = tuple(rec.name for rec in graph.records) pprint(nodes) order = ( - 'ca-certificates', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'tk', - 'xz', - 'zlib', - 'libedit', - 'readline', - 'certifi', - 'click', - 'itsdangerous', - 'markupsafe', - 'python', - 'setuptools', - 'werkzeug', - 'jinja2', - 'flask', - 'sqlite', # deep cyclical dependency; guess this is what we get + "ca-certificates", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "tk", + "xz", + "zlib", + "libedit", + "readline", + "certifi", + "click", + "itsdangerous", + "markupsafe", + "python", + "setuptools", + "werkzeug", + "jinja2", + "flask", + "sqlite", # deep cyclical dependency; guess this is what we get ) assert nodes == order # test remove spec # because of this deep cyclical dependency, removing jinja2 will remove sqlite and python expected_removal = ( - 'certifi', - 'click', - 'itsdangerous', - 'markupsafe', - 'python', - 'setuptools', - 'werkzeug', - 'jinja2', - 'flask', - 'sqlite', + "certifi", + "click", + "itsdangerous", + "markupsafe", + "python", + "setuptools", + "werkzeug", + "jinja2", + "flask", + "sqlite", ) removed_nodes = 
graph.remove_spec(MatchSpec("sqlite")) @@ -860,23 +877,20 @@ def test_deep_cyclical_dependency(tmpdir): pprint(removed_nodes) assert removed_nodes == expected_removal - graph = PrefixGraph(*get_sqlite_cyclical_record_set(tmpdir)) removed_nodes = graph.remove_youngest_descendant_nodes_with_specs() removed_nodes = tuple(rec.name for rec in removed_nodes) pprint(removed_nodes) - expected_removal = ( - 'flask', - ) + expected_removal = ("flask",) assert removed_nodes == expected_removal removed_nodes = graph.prune() removed_nodes = tuple(rec.name for rec in removed_nodes) pprint(removed_nodes) expected_removal = ( - 'click', - 'itsdangerous', - 'werkzeug', + "click", + "itsdangerous", + "werkzeug", ) assert removed_nodes == expected_removal @@ -888,30 +902,29 @@ def test_deep_cyclical_dependency(tmpdir): ) assert removed_nodes == expected_removal - graph = PrefixGraph(*get_sqlite_cyclical_record_set(tmpdir)) - markupsafe_node = graph.get_node_by_name('markupsafe') + markupsafe_node = graph.get_node_by_name("markupsafe") markupsafe_ancestors = graph.all_ancestors(markupsafe_node) nodes = tuple(rec.name for rec in markupsafe_ancestors) pprint(nodes) order = ( - 'ca-certificates', - 'libgcc-ng', - 'libstdcxx-ng', - 'libffi', - 'ncurses', - 'openssl', - 'tk', - 'xz', - 'zlib', - 'libedit', - 'readline', - 'certifi', - 'markupsafe', - 'python', - 'setuptools', - 'jinja2', - 'sqlite', + "ca-certificates", + "libgcc-ng", + "libstdcxx-ng", + "libffi", + "ncurses", + "openssl", + "tk", + "xz", + "zlib", + "libedit", + "readline", + "certifi", + "markupsafe", + "python", + "setuptools", + "jinja2", + "sqlite", ) assert nodes == order @@ -919,25 +932,29 @@ def test_deep_cyclical_dependency(tmpdir): nodes = tuple(rec.name for rec in markupsafe_descendants) pprint(nodes) order = ( - 'certifi', - 'click', - 'itsdangerous', - 'markupsafe', - 'python', - 'setuptools', - 'werkzeug', - 'jinja2', - 'flask', - 'sqlite', + "certifi", + "click", + "itsdangerous", + "markupsafe", + "python", + "setuptools", + "werkzeug", + "jinja2", + "flask", + "sqlite", ) assert nodes == order def test_general_graph_bfs_simple(): - a = PackageRecord(name="a", version="1", build="0", build_number=0, depends=["b", "c", "d"]) + a = PackageRecord( + name="a", version="1", build="0", build_number=0, depends=["b", "c", "d"] + ) b = PackageRecord(name="b", version="1", build="0", build_number=0, depends=["e"]) c = PackageRecord(name="c", version="1", build="0", build_number=0) - d = PackageRecord(name="d", version="1", build="0", build_number=0, depends=["f", "g"]) + d = PackageRecord( + name="d", version="1", build="0", build_number=0, depends=["f", "g"] + ) e = PackageRecord(name="e", version="1", build="0", build_number=0) f = PackageRecord(name="f", version="1", build="0", build_number=0) g = PackageRecord(name="g", version="1", build="0", build_number=0) @@ -961,10 +978,14 @@ def test_general_graph_bfs_simple(): def test_general_graph_bfs_version(): - a = PackageRecord(name="a", version="1", build="0", build_number=0, depends=["b", "c", "d"]) + a = PackageRecord( + name="a", version="1", build="0", build_number=0, depends=["b", "c", "d"] + ) b = PackageRecord(name="b", version="1", build="0", build_number=0, depends=["e"]) c = PackageRecord(name="c", version="1", build="0", build_number=0, depends=["g=1"]) - d = PackageRecord(name="d", version="1", build="0", build_number=0, depends=["f", "g=2"]) + d = PackageRecord( + name="d", version="1", build="0", build_number=0, depends=["f", "g=2"] + ) e = PackageRecord(name="e", version="1", 
build="0", build_number=0) f = PackageRecord(name="f", version="1", build="0", build_number=0) g1 = PackageRecord(name="g", version="1", build="0", build_number=0) diff --git a/tests/models/test_version.py b/tests/models/test_version.py index 1d0e5fbcf09..d34a7c477bb 100644 --- a/tests/models/test_version.py +++ b/tests/models/test_version.py @@ -1,79 +1,86 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - +import unittest from copy import copy from random import shuffle -import unittest + +import pytest from conda.exceptions import InvalidVersionSpec from conda.models.version import VersionOrder, VersionSpec, normalized_version, ver_eval -import pytest class TestVersionSpec(unittest.TestCase): - def test_version_order(self): versions = [ - ("0.4", [[0], [0], [4]]), - ("0.4.0", [[0], [0], [4], [0]]), - ("0.4.1a.vc11",[[0], [0], [4], [1, 'a'],[0, 'vc', 11]]), - ("0.4.1.rc", [[0], [0], [4], [1], [0, 'rc']]), - ("0.4.1.vc11", [[0], [0], [4], [1],[0, 'vc', 11]]), - ("0.4.1", [[0], [0], [4], [1]]), - ("0.5*", [[0], [0], [5, '*']]), - ("0.5a1", [[0], [0], [5, 'a', 1]]), - ("0.5b3", [[0], [0], [5, 'b', 3]]), - ("0.5C1", [[0], [0], [5, 'c', 1]]), - ("0.5z", [[0], [0], [5, 'z']]), - ("0.5za", [[0], [0], [5, 'za']]), - ("0.5", [[0], [0], [5]]), - ("0.5_5", [[0], [0], [5], [5]]), - ("0.5-5", [[0], [0], [5], [5]]), - ("0.9.6", [[0], [0], [9], [6]]), - ("0.960923", [[0], [0], [960923]]), - ("1.0", [[0], [1], [0]]), - ("1.0.4a3", [[0], [1], [0], [4, 'a', 3]]), - ("1.0.4b1", [[0], [1], [0], [4, 'b', 1]]), - ("1.0.4", [[0], [1], [0], [4]]), - ("1.1dev1", [[0], [1], [1, 'DEV', 1]]), - ("1.1_", [[0], [1], [1, '_']]), - ("1.1a1", [[0], [1], [1, 'a', 1]]), - ("1.1.dev1", [[0], [1], [1], [0, 'DEV', 1]]), - ("1.1.a1", [[0], [1], [1], [0, 'a', 1]]), - ("1.1", [[0], [1], [1]]), - ("1.1.post1", [[0], [1], [1], [0, float('inf'), 1]]), - ("1.1.1dev1", [[0], [1], [1], [1, 'DEV', 1]]), - ("1.1.1rc1", [[0], [1], [1], [1, 'rc', 1]]), - ("1.1.1", [[0], [1], [1], [1]]), - ("1.1.1post1", [[0], [1], [1], [1, float('inf'), 1]]), - ("1.1post1", [[0], [1], [1, float('inf'), 1]]), - ("2g6", [[0], [2, 'g', 6]]), - ("2.0b1pr0", [[0], [2], [0, 'b', 1, 'pr', 0]]), - ("2.2be.ta29", [[0], [2], [2, 'be'], [0, 'ta', 29]]), - ("2.2be5ta29", [[0], [2], [2, 'be', 5, 'ta', 29]]), - ("2.2beta29", [[0], [2], [2, 'beta', 29]]), - ("2.2.0.1", [[0], [2], [2],[0],[1]]), - ("3.1.1.6", [[0], [3], [1], [1], [6]]), - ("3.2.p.r0", [[0], [3], [2], [0, 'p'], [0, 'r', 0]]), - ("3.2.pr0", [[0], [3], [2], [0, 'pr', 0]]), - ("3.2.pr.1", [[0], [3], [2], [0, 'pr'], [1]]), - ("5.5.kw", [[0], [5], [5], [0, 'kw']]), - ("11g", [[0], [11, 'g']]), - ("14.3.1", [[0], [14], [3], [1]]), - ("14.3.1.post26.g9d75ca2", - [[0],[14],[3],[1],[0,float('inf'),26],[0,'g',9,'d',75,'ca',2]]), - ("1996.07.12", [[0], [1996], [7], [12]]), - ("1!0.4.1", [[1], [0], [4], [1]]), - ("1!3.1.1.6", [[1], [3], [1], [1], [6]]), - ("2!0.4.1", [[2], [0], [4], [1]]), + ("0.4", [[0], [0], [4]]), + ("0.4.0", [[0], [0], [4], [0]]), + ("0.4.1a.vc11", [[0], [0], [4], [1, "a"], [0, "vc", 11]]), + ("0.4.1.rc", [[0], [0], [4], [1], [0, "rc"]]), + ("0.4.1.vc11", [[0], [0], [4], [1], [0, "vc", 11]]), + ("0.4.1", [[0], [0], [4], [1]]), + ("0.5*", [[0], [0], [5, "*"]]), + ("0.5a1", [[0], [0], [5, "a", 1]]), + ("0.5b3", [[0], [0], [5, "b", 3]]), + ("0.5C1", [[0], [0], [5, "c", 1]]), + ("0.5z", [[0], [0], [5, "z"]]), + ("0.5za", [[0], [0], [5, "za"]]), + ("0.5", [[0], [0], [5]]), + ("0.5_5", [[0], [0], [5], [5]]), + ("0.5-5", [[0], [0], [5], [5]]), + ("0.9.6", [[0], [0], [9], 
[6]]), + ("0.960923", [[0], [0], [960923]]), + ("1.0", [[0], [1], [0]]), + ("1.0.4a3", [[0], [1], [0], [4, "a", 3]]), + ("1.0.4b1", [[0], [1], [0], [4, "b", 1]]), + ("1.0.4", [[0], [1], [0], [4]]), + ("1.1dev1", [[0], [1], [1, "DEV", 1]]), + ("1.1_", [[0], [1], [1, "_"]]), + ("1.1a1", [[0], [1], [1, "a", 1]]), + ("1.1.dev1", [[0], [1], [1], [0, "DEV", 1]]), + ("1.1.a1", [[0], [1], [1], [0, "a", 1]]), + ("1.1", [[0], [1], [1]]), + ("1.1.post1", [[0], [1], [1], [0, float("inf"), 1]]), + ("1.1.1dev1", [[0], [1], [1], [1, "DEV", 1]]), + ("1.1.1rc1", [[0], [1], [1], [1, "rc", 1]]), + ("1.1.1", [[0], [1], [1], [1]]), + ("1.1.1post1", [[0], [1], [1], [1, float("inf"), 1]]), + ("1.1post1", [[0], [1], [1, float("inf"), 1]]), + ("2g6", [[0], [2, "g", 6]]), + ("2.0b1pr0", [[0], [2], [0, "b", 1, "pr", 0]]), + ("2.2be.ta29", [[0], [2], [2, "be"], [0, "ta", 29]]), + ("2.2be5ta29", [[0], [2], [2, "be", 5, "ta", 29]]), + ("2.2beta29", [[0], [2], [2, "beta", 29]]), + ("2.2.0.1", [[0], [2], [2], [0], [1]]), + ("3.1.1.6", [[0], [3], [1], [1], [6]]), + ("3.2.p.r0", [[0], [3], [2], [0, "p"], [0, "r", 0]]), + ("3.2.pr0", [[0], [3], [2], [0, "pr", 0]]), + ("3.2.pr.1", [[0], [3], [2], [0, "pr"], [1]]), + ("5.5.kw", [[0], [5], [5], [0, "kw"]]), + ("11g", [[0], [11, "g"]]), + ("14.3.1", [[0], [14], [3], [1]]), + ( + "14.3.1.post26.g9d75ca2", + [ + [0], + [14], + [3], + [1], + [0, float("inf"), 26], + [0, "g", 9, "d", 75, "ca", 2], + ], + ), + ("1996.07.12", [[0], [1996], [7], [12]]), + ("1!0.4.1", [[1], [0], [4], [1]]), + ("1!3.1.1.6", [[1], [3], [1], [1], [6]]), + ("2!0.4.1", [[2], [0], [4], [1]]), ] # check parser versions = [(v, VersionOrder(v), expected) for v, expected in versions] for s, v, expected in versions: assert VersionOrder(v) is v - assert str(v) == s.lower().replace('-', '_') + assert str(v) == s.lower().replace("-", "_") self.assertEqual(v.version, expected) self.assertEqual(VersionOrder("0.4.1.rc"), VersionOrder(" 0.4.1.RC ")) self.assertEqual(normalized_version(" 0.4.1.RC "), VersionOrder("0.4.1.rc")) @@ -103,25 +110,28 @@ def test_version_order(self): self.assertFalse(VersionOrder("0.4.1+1").startswith(VersionOrder("0.4.1+1.3"))) def test_openssl_convention(self): - openssl = [VersionOrder(k) for k in ( - '1.0.1dev', - '1.0.1_', # <- this - '1.0.1a', - '1.0.1b', - '1.0.1c', - '1.0.1d', - '1.0.1r', - '1.0.1rc', - '1.0.1rc1', - '1.0.1rc2', - '1.0.1s', - '1.0.1', # <- compared to this - '1.0.1post.a', - '1.0.1post.b', - '1.0.1post.z', - '1.0.1post.za', - '1.0.2', - )] + openssl = [ + VersionOrder(k) + for k in ( + "1.0.1dev", + "1.0.1_", # <- this + "1.0.1a", + "1.0.1b", + "1.0.1c", + "1.0.1d", + "1.0.1r", + "1.0.1rc", + "1.0.1rc1", + "1.0.1rc2", + "1.0.1s", + "1.0.1", # <- compared to this + "1.0.1post.a", + "1.0.1post.b", + "1.0.1post.z", + "1.0.1post.za", + "1.0.2", + ) + ] shuffled = copy(openssl) shuffle(shuffled) assert sorted(shuffled) == openssl @@ -131,22 +141,59 @@ def test_pep440(self): # https://github.com/pypa/packaging/blob/master/tests/test_version.py) VERSIONS = [ # Implicit epoch of 0 - "1.0a1", "1.0a2.dev456", "1.0a12.dev456", "1.0a12", - "1.0b1.dev456", "1.0b2", "1.0b2.post345.dev456", "1.0b2.post345", - "1.0c1.dev456", "1.0c1", "1.0c3", "1.0rc2", "1.0.dev456", "1.0", - "1.0.post456.dev34", "1.0.post456", "1.1.dev1", - "1.2.r32+123456", "1.2.rev33+123456", - "1.2+abc", "1.2+abc123def", "1.2+abc123", - "1.2+123abc", "1.2+123abc456", "1.2+1234.abc", "1.2+123456", - + "1.0a1", + "1.0a2.dev456", + "1.0a12.dev456", + "1.0a12", + "1.0b1.dev456", + "1.0b2", + "1.0b2.post345.dev456", + 
"1.0b2.post345", + "1.0c1.dev456", + "1.0c1", + "1.0c3", + "1.0rc2", + "1.0.dev456", + "1.0", + "1.0.post456.dev34", + "1.0.post456", + "1.1.dev1", + "1.2.r32+123456", + "1.2.rev33+123456", + "1.2+abc", + "1.2+abc123def", + "1.2+abc123", + "1.2+123abc", + "1.2+123abc456", + "1.2+1234.abc", + "1.2+123456", # Explicit epoch of 1 - "1!1.0a1", "1!1.0a2.dev456", "1!1.0a12.dev456", "1!1.0a12", - "1!1.0b1.dev456", "1!1.0b2", "1!1.0b2.post345.dev456", "1!1.0b2.post345", - "1!1.0c1.dev456", "1!1.0c1", "1!1.0c3", "1!1.0rc2", "1!1.0.dev456", "1!1.0", - "1!1.0.post456.dev34", "1!1.0.post456", "1!1.1.dev1", - "1!1.2.r32+123456", "1!1.2.rev33+123456", - "1!1.2+abc", "1!1.2+abc123def", "1!1.2+abc123", - "1!1.2+123abc", "1!1.2+123abc456", "1!1.2+1234.abc", "1!1.2+123456", + "1!1.0a1", + "1!1.0a2.dev456", + "1!1.0a12.dev456", + "1!1.0a12", + "1!1.0b1.dev456", + "1!1.0b2", + "1!1.0b2.post345.dev456", + "1!1.0b2.post345", + "1!1.0c1.dev456", + "1!1.0c1", + "1!1.0c3", + "1!1.0rc2", + "1!1.0.dev456", + "1!1.0", + "1!1.0.post456.dev34", + "1!1.0.post456", + "1!1.1.dev1", + "1!1.2.r32+123456", + "1!1.2.rev33+123456", + "1!1.2+abc", + "1!1.2+abc123def", + "1!1.2+abc123", + "1!1.2+123abc", + "1!1.2+123abc456", + "1!1.2+1234.abc", + "1!1.2+123456", ] version = [VersionOrder(v) for v in VERSIONS] @@ -154,44 +201,44 @@ def test_pep440(self): self.assertEqual(version, sorted(version)) def test_hexrd(self): - VERSIONS = ['0.3.0.dev', '0.3.3'] + VERSIONS = ["0.3.0.dev", "0.3.3"] vos = [VersionOrder(v) for v in VERSIONS] self.assertEqual(sorted(vos), vos) def test_ver_eval(self): - self.assertEqual(ver_eval('1.7.0', '==1.7'), True) - self.assertEqual(ver_eval('1.7.0', '<=1.7'), True) - self.assertEqual(ver_eval('1.7.0', '<1.7'), False) - self.assertEqual(ver_eval('1.7.0', '>=1.7'), True) - self.assertEqual(ver_eval('1.7.0', '>1.7'), False) - self.assertEqual(ver_eval('1.6.7', '>=1.7'), False) - self.assertEqual(ver_eval('2013a', '>2013b'), False) - self.assertEqual(ver_eval('2013k', '>2013b'), True) - self.assertEqual(ver_eval('3.0.0', '>2013b'), False) - self.assertEqual(ver_eval('1.0.0', '>1.0.0a'), True) - self.assertEqual(ver_eval('1.0.0', '>1.0.0*'), True) - self.assertEqual(ver_eval('1.0', '1.0*'), True) - self.assertEqual(ver_eval('1.0.0', '1.0*'), True) - self.assertEqual(ver_eval('1.0', '1.0.0*'), True) - self.assertEqual(ver_eval('1.0.1', '1.0.0*'), False) - self.assertEqual(ver_eval('2013a', '2013a*'), True) - self.assertEqual(ver_eval('2013a', '2013b*'), False) - self.assertEqual(ver_eval('2013ab', '2013a*'), True) - self.assertEqual(ver_eval('1.3.4', '1.2.4*'), False) - self.assertEqual(ver_eval('1.2.3+4.5.6', '1.2.3*'), True) - self.assertEqual(ver_eval('1.2.3+4.5.6', '1.2.3+4*'), True) - self.assertEqual(ver_eval('1.2.3+4.5.6', '1.2.3+5*'), False) - self.assertEqual(ver_eval('1.2.3+4.5.6', '1.2.4+5*'), False) + self.assertEqual(ver_eval("1.7.0", "==1.7"), True) + self.assertEqual(ver_eval("1.7.0", "<=1.7"), True) + self.assertEqual(ver_eval("1.7.0", "<1.7"), False) + self.assertEqual(ver_eval("1.7.0", ">=1.7"), True) + self.assertEqual(ver_eval("1.7.0", ">1.7"), False) + self.assertEqual(ver_eval("1.6.7", ">=1.7"), False) + self.assertEqual(ver_eval("2013a", ">2013b"), False) + self.assertEqual(ver_eval("2013k", ">2013b"), True) + self.assertEqual(ver_eval("3.0.0", ">2013b"), False) + self.assertEqual(ver_eval("1.0.0", ">1.0.0a"), True) + self.assertEqual(ver_eval("1.0.0", ">1.0.0*"), True) + self.assertEqual(ver_eval("1.0", "1.0*"), True) + self.assertEqual(ver_eval("1.0.0", "1.0*"), True) + 
self.assertEqual(ver_eval("1.0", "1.0.0*"), True) + self.assertEqual(ver_eval("1.0.1", "1.0.0*"), False) + self.assertEqual(ver_eval("2013a", "2013a*"), True) + self.assertEqual(ver_eval("2013a", "2013b*"), False) + self.assertEqual(ver_eval("2013ab", "2013a*"), True) + self.assertEqual(ver_eval("1.3.4", "1.2.4*"), False) + self.assertEqual(ver_eval("1.2.3+4.5.6", "1.2.3*"), True) + self.assertEqual(ver_eval("1.2.3+4.5.6", "1.2.3+4*"), True) + self.assertEqual(ver_eval("1.2.3+4.5.6", "1.2.3+5*"), False) + self.assertEqual(ver_eval("1.2.3+4.5.6", "1.2.4+5*"), False) def test_ver_eval_errors(self): - self.assertRaises(InvalidVersionSpec, ver_eval, '3.0.0', '><2.4.5') - self.assertRaises(InvalidVersionSpec, ver_eval, '3.0.0', '!!2.4.5') - self.assertRaises(InvalidVersionSpec, ver_eval, '3.0.0', '!') + self.assertRaises(InvalidVersionSpec, ver_eval, "3.0.0", "><2.4.5") + self.assertRaises(InvalidVersionSpec, ver_eval, "3.0.0", "!!2.4.5") + self.assertRaises(InvalidVersionSpec, ver_eval, "3.0.0", "!") def test_version_spec_1(self): - v1 = VersionSpec('1.7.1') - v2 = VersionSpec('1.7.1*') - v3 = VersionSpec('1.7.1') + v1 = VersionSpec("1.7.1") + v2 = VersionSpec("1.7.1*") + v3 = VersionSpec("1.7.1") self.assertTrue(v1.is_exact()) self.assertFalse(v2.is_exact()) self.assertTrue(v1 == v3) @@ -204,16 +251,16 @@ def test_version_spec_1(self): self.assertNotEqual(hash(v1), hash(v2)) def test_version_spec_2(self): - v1 = VersionSpec('( (1.5|((1.6|1.7), 1.8), 1.9 |2.0))|2.1') - self.assertEqual(v1.spec, '1.5|1.6|1.7,1.8,1.9|2.0|2.1') - self.assertRaises(InvalidVersionSpec, VersionSpec, '(1.5') - self.assertRaises(InvalidVersionSpec, VersionSpec, '1.5)') - self.assertRaises(InvalidVersionSpec, VersionSpec, '1.5||1.6') - self.assertRaises(InvalidVersionSpec, VersionSpec, '^1.5') + v1 = VersionSpec("( (1.5|((1.6|1.7), 1.8), 1.9 |2.0))|2.1") + self.assertEqual(v1.spec, "1.5|1.6|1.7,1.8,1.9|2.0|2.1") + self.assertRaises(InvalidVersionSpec, VersionSpec, "(1.5") + self.assertRaises(InvalidVersionSpec, VersionSpec, "1.5)") + self.assertRaises(InvalidVersionSpec, VersionSpec, "1.5||1.6") + self.assertRaises(InvalidVersionSpec, VersionSpec, "^1.5") def test_version_spec_3(self): - v1 = VersionSpec('1.7.1*') - v2 = VersionSpec('1.7.1.*') + v1 = VersionSpec("1.7.1*") + v2 = VersionSpec("1.7.1.*") self.assertFalse(v1.is_exact()) self.assertFalse(v2.is_exact()) self.assertTrue(v1 == v2) @@ -221,9 +268,9 @@ def test_version_spec_3(self): self.assertEqual(hash(v1), hash(v2)) def test_version_spec_4(self): - v1 = VersionSpec('1.7.1*,1.8.1*') - v2 = VersionSpec('1.7.1.*,1.8.1.*') - v3 = VersionSpec('1.7.1*,1.8.1.*') + v1 = VersionSpec("1.7.1*,1.8.1*") + v2 = VersionSpec("1.7.1.*,1.8.1.*") + v3 = VersionSpec("1.7.1*,1.8.1.*") assert v1.is_exact() is False assert v2.is_exact() is False assert v1 == v2 == v3 @@ -232,35 +279,53 @@ def test_version_spec_4(self): def test_match(self): for vspec, res in ( - ('1.7.*', True), ('1.7.1', True), ('1.7.0', False), - ('1.7', False), ('1.5.*', False), ('>=1.5', True), - ('!=1.5', True), ('!=1.7.1', False), ('==1.7.1', True), - ('==1.7', False), ('==1.7.2', False), ('==1.7.1.0', True), - ('1.7.*|1.8.*', True), + ("1.7.*", True), + ("1.7.1", True), + ("1.7.0", False), + ("1.7", False), + ("1.5.*", False), + (">=1.5", True), + ("!=1.5", True), + ("!=1.7.1", False), + ("==1.7.1", True), + ("==1.7", False), + ("==1.7.2", False), + ("==1.7.1.0", True), + ("1.7.*|1.8.*", True), # ('1.8/*|1.9.*', False), what was this supposed to be? 
- ('>1.7,<1.8', True), ('>1.7.1,<1.8', False), - ('^1.7.1$', True), (r'^1\.7\.1$', True), (r'^1\.7\.[0-9]+$', True), - (r'^1\.8.*$', False), (r'^1\.[5-8]\.1$', True), (r'^[^1].*$', False), - (r'^[0-9+]+\.[0-9+]+\.[0-9]+$', True), ('^$', False), - ('^.*$', True), ('1.7.*|^0.*$', True), ('1.6.*|^0.*$', False), - ('1.6.*|^0.*$|1.7.1', True), ('^0.*$|1.7.1', True), - (r'1.6.*|^.*\.7\.1$|0.7.1', True), ('*', True), ('1.*.1', True), - ('1.5.*|>1.7,<1.8', True), ('1.5.*|>1.7,<1.7.1', False), + (">1.7,<1.8", True), + (">1.7.1,<1.8", False), + ("^1.7.1$", True), + (r"^1\.7\.1$", True), + (r"^1\.7\.[0-9]+$", True), + (r"^1\.8.*$", False), + (r"^1\.[5-8]\.1$", True), + (r"^[^1].*$", False), + (r"^[0-9+]+\.[0-9+]+\.[0-9]+$", True), + ("^$", False), + ("^.*$", True), + ("1.7.*|^0.*$", True), + ("1.6.*|^0.*$", False), + ("1.6.*|^0.*$|1.7.1", True), + ("^0.*$|1.7.1", True), + (r"1.6.*|^.*\.7\.1$|0.7.1", True), + ("*", True), + ("1.*.1", True), + ("1.5.*|>1.7,<1.8", True), + ("1.5.*|>1.7,<1.7.1", False), ): m = VersionSpec(vspec) assert VersionSpec(m) is m assert str(m) == vspec assert repr(m) == "VersionSpec('%s')" % vspec - assert m.match('1.7.1') == res, vspec + assert m.match("1.7.1") == res, vspec def test_local_identifier(self): """The separator for the local identifier should be either `.` or `+`""" # a valid versionstr should match itself versions = ( - '1.7.0' - '1.7.0.post123' - '1.7.0.post123.gabcdef9', - '1.7.0.post123+gabcdef9', + "1.7.0" "1.7.0.post123" "1.7.0.post123.gabcdef9", + "1.7.0.post123+gabcdef9", ) for version in versions: m = VersionSpec(version) @@ -288,13 +353,13 @@ def test_not_eq_star(self): assert not VersionSpec("!=3.3.*").match("3.3.0.0") def test_compound_versions(self): - vs = VersionSpec('>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*') - assert not vs.match('2.6.8') - assert vs.match('2.7.2') - assert not vs.match('3.3') - assert not vs.match('3.3.4') - assert vs.match('3.4') - assert vs.match('3.4a') + vs = VersionSpec(">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*") + assert not vs.match("2.6.8") + assert vs.match("2.7.2") + assert not vs.match("3.3") + assert not vs.match("3.3.4") + assert vs.match("3.4") + assert vs.match("3.4a") def test_invalid_version_specs(self): with pytest.raises(InvalidVersionSpec): diff --git a/tests/notices/test_core.py b/tests/notices/test_core.py index 54c907b35bb..0bd8fcdfef8 100644 --- a/tests/notices/test_core.py +++ b/tests/notices/test_core.py @@ -4,12 +4,11 @@ from conda.base.constants import NOTICES_DECORATOR_DISPLAY_INTERVAL from conda.notices import core as notices - from conda.testing.notices.helpers import ( - add_resp_to_mock, - notices_decorator_assert_message_in_stdout, DummyArgs, + add_resp_to_mock, get_test_notices, + notices_decorator_assert_message_in_stdout, offset_cache_file_mtime, ) @@ -99,7 +98,9 @@ def dummy(args, parser): dummy(dummy_args, None) captured = capsys.readouterr() - notices_decorator_assert_message_in_stdout(captured, messages=messages, dummy_mesg=dummy_mesg) + notices_decorator_assert_message_in_stdout( + captured, messages=messages, dummy_mesg=dummy_mesg + ) dummy(dummy_args, None) captured = capsys.readouterr() diff --git a/tests/notices/test_fetch.py b/tests/notices/test_fetch.py index 40f52400ae6..31f0a47e546 100644 --- a/tests/notices/test_fetch.py +++ b/tests/notices/test_fetch.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from unittest.mock import patch import requests diff --git a/tests/notices/test_types.py b/tests/notices/test_types.py index 
352809a714d..0744a71a3b4 100644 --- a/tests/notices/test_types.py +++ b/tests/notices/test_types.py @@ -1,8 +1,6 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from conda.notices.types import ChannelNoticeResponse - from conda.testing.notices.helpers import get_test_notices diff --git a/tests/plugins/conftest.py b/tests/plugins/conftest.py index e7bf6a65649..aa1e32fa06e 100644 --- a/tests/plugins/conftest.py +++ b/tests/plugins/conftest.py @@ -1,12 +1,12 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -import pytest import sys -import conda.cli +import pytest -from conda.plugins.manager import CondaPluginManager +import conda.cli from conda.plugins.hookspec import CondaSpecs +from conda.plugins.manager import CondaPluginManager @pytest.fixture diff --git a/tests/plugins/data/test-plugin/test_plugin/plugin.py b/tests/plugins/data/test-plugin/test_plugin/plugin.py index 4ffa3815c73..1f43faaea74 100644 --- a/tests/plugins/data/test-plugin/test_plugin/plugin.py +++ b/tests/plugins/data/test-plugin/test_plugin/plugin.py @@ -2,14 +2,13 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright (C) 2022 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause -from conda import plugins -from conda.core.solve import Solver - - # this is where we simulate an ImportError # tested in test_manager.py::test_load_entrypoints_importerror import package_that_does_not_exist +from conda import plugins +from conda.core.solve import Solver + @plugins.hookimpl def conda_solvers(): diff --git a/tests/plugins/test_manager.py b/tests/plugins/test_manager.py index 6612ac5680d..0b5a4ea197b 100644 --- a/tests/plugins/test_manager.py +++ b/tests/plugins/test_manager.py @@ -6,12 +6,11 @@ import pytest +from conda import plugins from conda.core import solve from conda.exceptions import PluginError -from conda import plugins from conda.plugins import virtual_packages - log = logging.getLogger(__name__) @@ -75,7 +74,9 @@ def conda_virtual_packages(): def test_load_plugins_error(plugin_manager, mocker): - mocker.patch.object(plugin_manager, "register", side_effect=ValueError("load_plugins error")) + mocker.patch.object( + plugin_manager, "register", side_effect=ValueError("load_plugins error") + ) with pytest.raises(PluginError) as exc: plugin_manager.load_plugins(VerboseSolverPlugin) assert plugin_manager.get_plugins() == set() diff --git a/tests/plugins/test_solvers.py b/tests/plugins/test_solvers.py index d36a8a07222..be375644a68 100644 --- a/tests/plugins/test_solvers.py +++ b/tests/plugins/test_solvers.py @@ -5,14 +5,13 @@ import pytest +from conda import plugins from conda.base.context import context from conda.core import solve from conda.exceptions import PluginError -from conda import plugins from conda.plugins.hookspec import CondaSpecs from conda.plugins.manager import CondaPluginManager - log = logging.getLogger(__name__) @@ -59,7 +58,8 @@ def test_get_solver_backend(plugin_manager): def test_get_cached_solver_backend(plugin_manager, mocker): mocked = mocker.patch( - "conda.plugins.manager.CondaPluginManager.get_solver_backend", side_effect=classic_solver + "conda.plugins.manager.CondaPluginManager.get_solver_backend", + side_effect=classic_solver, ) plugin_manager = CondaPluginManager() plugin_manager.add_hookspecs(CondaSpecs) @@ -105,7 +105,9 @@ def test_duplicated(plugin_manager): plugin_manager.register(SolverPlugin()) plugin_manager.register(SolverPlugin()) - with pytest.raises(PluginError, match=re.escape("Conflicting `solvers` plugins found")): + 
with pytest.raises( + PluginError, match=re.escape("Conflicting `solvers` plugins found") + ): plugin_manager.get_solver_backend() @@ -138,5 +140,7 @@ def test_get_conflicting_solvers(plugin_manager): plugin_manager.register(SolverPlugin()) plugin_manager.register(SolverPlugin()) - with pytest.raises(PluginError, match=re.escape("Conflicting `solvers` plugins found")): + with pytest.raises( + PluginError, match=re.escape("Conflicting `solvers` plugins found") + ): plugin_manager.get_hook_results("solvers") diff --git a/tests/plugins/test_virtual_packages.py b/tests/plugins/test_virtual_packages.py index 1188b49e075..11cf405ecd1 100644 --- a/tests/plugins/test_virtual_packages.py +++ b/tests/plugins/test_virtual_packages.py @@ -119,7 +119,8 @@ def test_subdir_override(): virtual = [p for p in packages if p.channel.name == "@"] assert any(p.name == expected for p in virtual) assert not any( - (p.name in platform_virtual_packages and p.name != expected) for p in virtual + (p.name in platform_virtual_packages and p.name != expected) + for p in virtual ) diff --git a/tests/test_activate.py b/tests/test_activate.py index 256d2580752..7b9c1ab666c 100644 --- a/tests/test_activate.py +++ b/tests/test_activate.py @@ -1,25 +1,24 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import json +import os +import platform +import sys from functools import lru_cache from itertools import chain -import json from logging import getLogger -import os from os.path import dirname, isdir, join from pathlib import Path from re import escape from subprocess import CalledProcessError, check_output -import platform -import sys from tempfile import gettempdir from unittest import TestCase from uuid import uuid4 import pytest -from conda import __version__ as conda_version from conda import CONDA_PACKAGE_ROOT, CONDA_SOURCE_ROOT -from conda.auxlib.ish import dals +from conda import __version__ as conda_version from conda.activate import ( CmdExeActivator, CshActivator, @@ -27,17 +26,18 @@ PosixActivator, PowerShellActivator, XonshActivator, - activator_map, _build_activator_cls, + activator_map, native_path_to_unix, ) +from conda.auxlib.ish import dals from conda.base.constants import ( - ROOT_ENV_NAME, - PREFIX_STATE_FILE, - PACKAGE_ENV_VARS_DIR, CONDA_ENV_VARS_UNSET_VAR, + PACKAGE_ENV_VARS_DIR, + PREFIX_STATE_FILE, + ROOT_ENV_NAME, ) -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context from conda.cli.main import main_sourced from conda.common.compat import ensure_text_type, on_win from conda.common.io import captured, env_var, env_vars @@ -46,9 +46,13 @@ from conda.gateways.disk.create import mkdir_p from conda.gateways.disk.delete import rm_rf from conda.gateways.disk.update import touch - from conda.testing.helpers import tempdir -from conda.testing.integration import Commands, run_command, SPACER_CHARACTER, make_temp_env +from conda.testing.integration import ( + SPACER_CHARACTER, + Commands, + make_temp_env, + run_command, +) log = getLogger(__name__) @@ -60,29 +64,30 @@ # encounter some different code that ends up being run (some of the time). You will go slowly mad. # No, you are best off keeping --dev on the end of these. For sure, if conda bundled its own tests # module then we could remove --dev if we detect we are being run in that way. 
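For context on how arg lists like the ones defined next are consumed: keeping --dev on every shell-hook invocation ensures the development conda is exercised, as the comment above explains. A rough sketch of such an invocation via subprocess (the exact "conda shell.posix activate --dev" command shape here is an assumption for illustration, not taken from this diff):

import subprocess

dev_arg = "--dev"
activate_args = ["activate", dev_arg]

# hypothetical invocation: ask conda to emit POSIX shell code for activation,
# the way a shell wrapper function would, keeping --dev on the command line
result = subprocess.run(
    ["python", "-m", "conda", "shell.posix", *activate_args, "base"],
    capture_output=True,
    text=True,
)
print(result.stdout)  # shell snippet to eval, e.g. PATH and CONDA_* exports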
-dev_arg = '--dev' -activate_args = ['activate', dev_arg] -reactivate_args = ['reactivate', dev_arg] -deactivate_args = ['deactivate', dev_arg] +dev_arg = "--dev" +activate_args = ["activate", dev_arg] +reactivate_args = ["reactivate", dev_arg] +deactivate_args = ["deactivate", dev_arg] if on_win: import ctypes - PYTHONIOENCODING = 'cp' + str(ctypes.cdll.kernel32.GetACP()) + + PYTHONIOENCODING = "cp" + str(ctypes.cdll.kernel32.GetACP()) else: PYTHONIOENCODING = None POP_THESE = ( - 'CONDA_SHLVL', - 'CONDA_DEFAULT_ENV', - 'CONDA_PREFIX', - 'CONDA_PREFIX_0', - 'CONDA_PREFIX_1', - 'CONDA_PREFIX_2', - 'PS1', - 'prompt', + "CONDA_SHLVL", + "CONDA_DEFAULT_ENV", + "CONDA_PREFIX", + "CONDA_PREFIX_0", + "CONDA_PREFIX_1", + "CONDA_PREFIX_2", + "PS1", + "prompt", ) -ENV_VARS_FILE = ''' +ENV_VARS_FILE = """ { "version": 1, "env_vars": { @@ -91,22 +96,23 @@ "ENV_THREE": "me", "ENV_WITH_SAME_VALUE": "with_same_value" } -}''' +}""" -PKG_A_ENV_VARS = ''' +PKG_A_ENV_VARS = """ { "PKG_A_ENV": "yerp" } -''' +""" -PKG_B_ENV_VARS = ''' +PKG_B_ENV_VARS = """ { "PKG_B_ENV": "berp" } -''' +""" HDF5_VERSION = "1.12.1" + @lru_cache(maxsize=None) def bash_unsupported_because(): bash = which("bash") @@ -140,8 +146,10 @@ def bash_unsupported(): def bash_unsupported_win_because(): if on_win: - return "You are using Windows. These tests involve setting PATH to POSIX values\n" \ - "but our Python is a Windows program and Windows doesn't understand POSIX values." + return ( + "You are using Windows. These tests involve setting PATH to POSIX values\n" + "but our Python is a Windows program and Windows doesn't understand POSIX values." + ) return bash_unsupported_because() @@ -150,7 +158,6 @@ def bash_unsupported_win(): class ActivatorUnitTests(TestCase): - def setUp(self): self.hold_environ = os.environ.copy() for var in POP_THESE: @@ -176,10 +183,10 @@ def test_activate_environment_not_found(self): activator.build_activate(td) with pytest.raises(EnvironmentLocationNotFound): - activator.build_activate('/not/an/environment') + activator.build_activate("/not/an/environment") with pytest.raises(EnvironmentNameNotFound): - activator.build_activate('wontfindmeIdontexist_abc123') + activator.build_activate("wontfindmeIdontexist_abc123") def test_wrong_args(self): pass @@ -188,20 +195,30 @@ def test_activate_help(self): pass def test_PS1(self): - with env_var("CONDA_CHANGEPS1", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CHANGEPS1", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): activator = PosixActivator() - assert activator._prompt_modifier('/dont/matter', ROOT_ENV_NAME) == '(%s) ' % ROOT_ENV_NAME + assert ( + activator._prompt_modifier("/dont/matter", ROOT_ENV_NAME) + == "(%s) " % ROOT_ENV_NAME + ) instructions = activator.build_activate("base") - assert instructions['export_vars']['CONDA_PROMPT_MODIFIER'] == '(%s) ' % ROOT_ENV_NAME + assert ( + instructions["export_vars"]["CONDA_PROMPT_MODIFIER"] + == "(%s) " % ROOT_ENV_NAME + ) def test_PS1_no_changeps1(self): - with env_var("CONDA_CHANGEPS1", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CHANGEPS1", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): activator = PosixActivator() - assert activator._prompt_modifier('/dont/matter', 'root') == '' + assert activator._prompt_modifier("/dont/matter", "root") == "" instructions = activator.build_activate("base") - assert instructions['export_vars']['CONDA_PROMPT_MODIFIER'] == '' + assert instructions["export_vars"]["CONDA_PROMPT_MODIFIER"] == "" def 
test_add_prefix_to_path_posix(self): if on_win and "PWD" not in os.environ: @@ -209,42 +226,48 @@ def test_add_prefix_to_path_posix(self): activator = PosixActivator() - path_dirs = activator.path_conversion(['/path1/bin', '/path2/bin', '/usr/local/bin', '/usr/bin', '/bin']) + path_dirs = activator.path_conversion( + ["/path1/bin", "/path2/bin", "/usr/local/bin", "/usr/bin", "/bin"] + ) assert len(path_dirs) == 5 - test_prefix = '/usr/mytest/prefix' + test_prefix = "/usr/mytest/prefix" added_paths = activator.path_conversion(activator._get_path_dirs(test_prefix)) if isinstance(added_paths, str): - added_paths = added_paths, + added_paths = (added_paths,) new_path = activator._add_prefix_to_path(test_prefix, path_dirs) - condabin_dir = activator.path_conversion(os.path.join(context.conda_prefix, "condabin")) + condabin_dir = activator.path_conversion( + os.path.join(context.conda_prefix, "condabin") + ) assert new_path == added_paths + (condabin_dir,) + path_dirs @pytest.mark.skipif(not on_win, reason="windows-specific test") def test_add_prefix_to_path_cmdexe(self): activator = CmdExeActivator() - path_dirs = activator.path_conversion(["C:\\path1", "C:\\Program Files\\Git\\cmd", "C:\\WINDOWS\\system32"]) + path_dirs = activator.path_conversion( + ["C:\\path1", "C:\\Program Files\\Git\\cmd", "C:\\WINDOWS\\system32"] + ) assert len(path_dirs) == 3 - test_prefix = '/usr/mytest/prefix' + test_prefix = "/usr/mytest/prefix" added_paths = activator.path_conversion(activator._get_path_dirs(test_prefix)) if isinstance(added_paths, str): - added_paths = added_paths, + added_paths = (added_paths,) new_path = activator._add_prefix_to_path(test_prefix, path_dirs) - assert new_path[:len(added_paths)] == added_paths - assert new_path[-len(path_dirs):] == path_dirs + assert new_path[: len(added_paths)] == added_paths + assert new_path[-len(path_dirs) :] == path_dirs assert len(new_path) == len(added_paths) + len(path_dirs) + 1 assert new_path[len(added_paths)].endswith("condabin") def test_remove_prefix_from_path_1(self): activator = PosixActivator() original_path = tuple(activator._get_starting_path_list()) - keep_path = activator.path_conversion('/keep/this/path') + keep_path = activator.path_conversion("/keep/this/path") final_path = (keep_path,) + original_path final_path = activator.path_conversion(final_path) - test_prefix = join(os.getcwd(), 'mytestpath') + test_prefix = join(os.getcwd(), "mytestpath") new_paths = tuple(activator._get_path_dirs(test_prefix)) prefix_added_path = (keep_path,) + new_paths + original_path new_path = activator._remove_prefix_from_path(test_prefix, prefix_added_path) @@ -254,11 +277,11 @@ def test_remove_prefix_from_path_2(self): # this time prefix doesn't actually exist in path activator = PosixActivator() original_path = tuple(activator._get_starting_path_list()) - keep_path = activator.path_conversion('/keep/this/path') + keep_path = activator.path_conversion("/keep/this/path") final_path = (keep_path,) + original_path final_path = activator.path_conversion(final_path) - test_prefix = join(os.getcwd(), 'mytestpath') + test_prefix = join(os.getcwd(), "mytestpath") prefix_added_path = (keep_path,) + original_path new_path = activator._remove_prefix_from_path(test_prefix, prefix_added_path) @@ -267,18 +290,20 @@ def test_remove_prefix_from_path_2(self): def test_replace_prefix_in_path_1(self): activator = PosixActivator() original_path = tuple(activator._get_starting_path_list()) - new_prefix = join(os.getcwd(), 'mytestpath-new') + new_prefix = join(os.getcwd(), 
"mytestpath-new") new_paths = activator.path_conversion(activator._get_path_dirs(new_prefix)) if isinstance(new_paths, str): - new_paths = new_paths, - keep_path = activator.path_conversion('/keep/this/path') + new_paths = (new_paths,) + keep_path = activator.path_conversion("/keep/this/path") final_path = (keep_path,) + new_paths + original_path final_path = activator.path_conversion(final_path) - replace_prefix = join(os.getcwd(), 'mytestpath') + replace_prefix = join(os.getcwd(), "mytestpath") replace_paths = tuple(activator._get_path_dirs(replace_prefix)) prefix_added_path = (keep_path,) + replace_paths + original_path - new_path = activator._replace_prefix_in_path(replace_prefix, new_prefix, prefix_added_path) + new_path = activator._replace_prefix_in_path( + replace_prefix, new_prefix, prefix_added_path + ) assert final_path == new_path @@ -292,13 +317,15 @@ def test_replace_prefix_in_path_2(self): activator = CmdExeActivator() old_path = activator.pathsep_join(activator._add_prefix_to_path(path1)) old_path = one_more + ";" + old_path - with env_var('PATH', old_path): + with env_var("PATH", old_path): activator = PosixActivator() path_elements = activator._replace_prefix_in_path(path1, path2) old_path = native_path_to_unix(old_path.split(";")) assert path_elements[0] == native_path_to_unix(one_more) - assert path_elements[1] == native_path_to_unix(next(activator._get_path_dirs(path2))) + assert path_elements[1] == native_path_to_unix( + next(activator._get_path_dirs(path2)) + ) assert len(path_elements) == len(old_path) def test_default_env(self): @@ -308,19 +335,20 @@ def test_default_env(self): with tempdir() as td: assert td == activator._default_env(td) - p = mkdir_p(join(td, 'envs', 'named-env')) - assert 'named-env' == activator._default_env(p) + p = mkdir_p(join(td, "envs", "named-env")) + assert "named-env" == activator._default_env(p) def test_build_activate_dont_activate_unset_var(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - activate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + activate_d_dir = mkdir_p(join(td, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) - env_vars_file = ''' + env_vars_file = ( + """ { "version": 1, "env_vars": { @@ -328,21 +356,23 @@ def test_build_activate_dont_activate_unset_var(self): "ENV_TWO": "you", "ENV_THREE": "%s" } - }''' % CONDA_ENV_VARS_UNSET_VAR + }""" + % CONDA_ENV_VARS_UNSET_VAR + ) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(env_vars_file) self.write_pkg_env_vars(td) - with env_var('CONDA_SHLVL', '0'): - with env_var('CONDA_PREFIX', ''): + with env_var("CONDA_SHLVL", "0"): + with env_var("CONDA_PREFIX", ""): activator = PosixActivator() builder = activator.build_activate(td) new_path = activator.pathsep_join(activator._add_prefix_to_path(td)) conda_prompt_modifier = "(%s) " % td - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") unset_vars = [] set_vars = {"PS1": ps1} @@ -357,23 +387,27 @@ def test_build_activate_dont_activate_unset_var(self): "ENV_ONE": "one", "ENV_TWO": "you", } - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) - 
assert builder['unset_vars'] == unset_vars - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == () + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) + assert builder["unset_vars"] == unset_vars + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == () def test_build_activate_shlvl_warn_clobber_vars(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - activate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + activate_d_dir = mkdir_p(join(td, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) - env_vars_file = ''' + env_vars_file = """ { "version": 1, "env_vars": { @@ -382,21 +416,21 @@ def test_build_activate_shlvl_warn_clobber_vars(self): "ENV_THREE": "me", "PKG_A_ENV": "teamnope" } - }''' + }""" activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(env_vars_file) self.write_pkg_env_vars(td) - with env_var('CONDA_SHLVL', '0'): - with env_var('CONDA_PREFIX', ''): + with env_var("CONDA_SHLVL", "0"): + with env_var("CONDA_PREFIX", ""): activator = PosixActivator() builder = activator.build_activate(td) new_path = activator.pathsep_join(activator._add_prefix_to_path(td)) conda_prompt_modifier = "(%s) " % td - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") unset_vars = [] set_vars = {"PS1": ps1} @@ -412,35 +446,39 @@ def test_build_activate_shlvl_warn_clobber_vars(self): "ENV_TWO": "you", "ENV_THREE": "me", } - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) - assert builder['unset_vars'] == unset_vars - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == () + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) + assert builder["unset_vars"] == unset_vars + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == () def test_build_activate_shlvl_0(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - activate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + activate_d_dir = mkdir_p(join(td, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) 
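
Aside: the env_var / env_vars context managers that appear throughout these hunks pin process environment state (CONDA_SHLVL, CONDA_PREFIX, and friends) for the duration of a single assertion block. Below is a minimal sketch of that pattern, for orientation only — it is not conda's implementation; the real helpers live in conda's test/common utilities and additionally accept a stack_callback that resets the context object:

    import os
    from contextlib import contextmanager

    @contextmanager
    def env_var_sketch(name, value):
        """Temporarily set one environment variable, restoring prior state on exit."""
        saved = os.environ.get(name)
        os.environ[name] = value
        try:
            yield
        finally:
            if saved is None:
                os.environ.pop(name, None)  # was unset before; unset it again
            else:
                os.environ[name] = saved

    # usage mirroring the tests in this hunk:
    with env_var_sketch("CONDA_SHLVL", "0"), env_var_sketch("CONDA_PREFIX", ""):
        ...  # activator.build_activate(td) runs against a clean shell level

Stacking two of these managers, as the tests do, keeps each scenario hermetic: whatever CONDA_SHLVL the developer's own shell exported cannot leak into the asserted activation plan.
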
self.write_pkg_env_vars(td) - with env_var('CONDA_SHLVL', '0'): - with env_var('CONDA_PREFIX', ''): + with env_var("CONDA_SHLVL", "0"): + with env_var("CONDA_PREFIX", ""): activator = PosixActivator() builder = activator.build_activate(td) new_path = activator.pathsep_join(activator._add_prefix_to_path(td)) conda_prompt_modifier = "(%s) " % td - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") unset_vars = [] set_vars = {"PS1": ps1} @@ -471,34 +509,39 @@ def test_build_activate_shlvl_0(self): @pytest.mark.skipif(bash_unsupported_win(), reason=bash_unsupported_win_because()) def test_build_activate_shlvl_1(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - activate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + activate_d_dir = mkdir_p(join(td, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) self.write_pkg_env_vars(td) - old_prefix = '/old/prefix' + old_prefix = "/old/prefix" activator = PosixActivator() old_path = activator.pathsep_join(activator._add_prefix_to_path(old_prefix)) - with env_vars({ - 'CONDA_SHLVL': '1', - 'CONDA_PREFIX': old_prefix, - 'PATH': old_path, - 'CONDA_ENV_PROMPT': '({default_env})', - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + { + "CONDA_SHLVL": "1", + "CONDA_PREFIX": old_prefix, + "PATH": old_path, + "CONDA_ENV_PROMPT": "({default_env})", + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): activator = PosixActivator() builder = activator.build_activate(td) - new_path = activator.pathsep_join(activator._replace_prefix_in_path(old_prefix, td)) + new_path = activator.pathsep_join( + activator._replace_prefix_in_path(old_prefix, td) + ) conda_prompt_modifier = "(%s)" % td - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") assert activator.path_conversion(td) in new_path assert old_prefix not in new_path @@ -520,14 +563,18 @@ def test_build_activate_shlvl_1(self): "ENV_WITH_SAME_VALUE": "with_same_value", } export_vars, _ = activator.add_export_unset_vars(export_vars, None) - export_vars['CONDA_PREFIX_1'] = old_prefix - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) + export_vars["CONDA_PREFIX_1"] = old_prefix + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) - assert builder['unset_vars'] == unset_vars - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == () + assert builder["unset_vars"] == unset_vars + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == () with env_vars( { @@ -557,8 +604,8 @@ def test_build_activate_shlvl_1(self): "ENV_THREE", "ENV_WITH_SAME_VALUE", ] - assert builder['set_vars'] == { - 'PS1': '(/old/prefix)', + assert builder["set_vars"] == 
{ + "PS1": "(/old/prefix)", } export_vars = { "CONDA_PREFIX": old_prefix, @@ -566,45 +613,52 @@ def test_build_activate_shlvl_1(self): "CONDA_DEFAULT_ENV": old_prefix, "CONDA_PROMPT_MODIFIER": "(%s)" % old_prefix, } - export_path = {'PATH': old_path,} - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) - assert builder['unset_vars'] == unset_vars - assert builder['export_vars'] == export_vars - assert builder['export_path'] == export_path - assert builder['activate_scripts'] == () - assert builder['deactivate_scripts'] == () + export_path = { + "PATH": old_path, + } + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) + assert builder["unset_vars"] == unset_vars + assert builder["export_vars"] == export_vars + assert builder["export_path"] == export_path + assert builder["activate_scripts"] == () + assert builder["deactivate_scripts"] == () @pytest.mark.skipif(bash_unsupported_win(), reason=bash_unsupported_win_because()) def test_build_stack_shlvl_1(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - activate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + activate_d_dir = mkdir_p(join(td, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) self.write_pkg_env_vars(td) - old_prefix = '/old/prefix' + old_prefix = "/old/prefix" activator = PosixActivator() old_path = activator.pathsep_join(activator._add_prefix_to_path(old_prefix)) - with env_vars({ - 'CONDA_SHLVL': '1', - 'CONDA_PREFIX': old_prefix, - 'PATH': old_path, - 'CONDA_ENV_PROMPT': '({default_env})', - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + { + "CONDA_SHLVL": "1", + "CONDA_PREFIX": old_prefix, + "PATH": old_path, + "CONDA_ENV_PROMPT": "({default_env})", + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): activator = PosixActivator() builder = activator.build_stack(td) new_path = activator.pathsep_join(activator._add_prefix_to_path(td)) conda_prompt_modifier = "(%s)" % td - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") assert td in new_path assert old_prefix in new_path @@ -623,30 +677,36 @@ def test_build_stack_shlvl_1(self): "ENV_THREE": "me", "ENV_WITH_SAME_VALUE": "with_same_value", } - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, []) - export_vars['CONDA_PREFIX_1'] = old_prefix - export_vars['CONDA_STACKED_2'] = 'true' - - assert builder['unset_vars'] == unset_vars - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == () - - with env_vars({ - 'PATH': new_path, - 'CONDA_PREFIX': td, - 'CONDA_PREFIX_1': old_prefix, - 'CONDA_SHLVL': 2, - 'CONDA_DEFAULT_ENV': td, - 'CONDA_PROMPT_MODIFIER': conda_prompt_modifier, - 'CONDA_STACKED_2': 'true', - 'PKG_A_ENV': 'yerp', - 'PKG_B_ENV': 'berp', - 'ENV_ONE': 'one', - 'ENV_TWO': 'you', - 'ENV_THREE': 'me' - }): + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, [] 
+ ) + export_vars["CONDA_PREFIX_1"] = old_prefix + export_vars["CONDA_STACKED_2"] = "true" + + assert builder["unset_vars"] == unset_vars + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == () + + with env_vars( + { + "PATH": new_path, + "CONDA_PREFIX": td, + "CONDA_PREFIX_1": old_prefix, + "CONDA_SHLVL": 2, + "CONDA_DEFAULT_ENV": td, + "CONDA_PROMPT_MODIFIER": conda_prompt_modifier, + "CONDA_STACKED_2": "true", + "PKG_A_ENV": "yerp", + "PKG_B_ENV": "berp", + "ENV_ONE": "one", + "ENV_TWO": "you", + "ENV_THREE": "me", + } + ): activator = PosixActivator() builder = activator.build_deactivate() @@ -660,8 +720,8 @@ def test_build_stack_shlvl_1(self): "ENV_THREE", "ENV_WITH_SAME_VALUE", ] - assert builder['set_vars'] == { - 'PS1': '(/old/prefix)', + assert builder["set_vars"] == { + "PS1": "(/old/prefix)", } export_vars = { "CONDA_PREFIX": old_prefix, @@ -669,37 +729,41 @@ def test_build_stack_shlvl_1(self): "CONDA_DEFAULT_ENV": old_prefix, "CONDA_PROMPT_MODIFIER": f"({old_prefix})", } - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) - assert builder['unset_vars'] == unset_vars - assert builder['export_vars'] == export_vars - assert builder['activate_scripts'] == () - assert builder['deactivate_scripts'] == () + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) + assert builder["unset_vars"] == unset_vars + assert builder["export_vars"] == export_vars + assert builder["activate_scripts"] == () + assert builder["deactivate_scripts"] == () def test_activate_same_environment(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - activate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + activate_d_dir = mkdir_p(join(td, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) old_prefix = td - deactivate_d_dir = mkdir_p(join(old_prefix, 'etc', 'conda', 'deactivate.d')) - deactivate_d_1 = join(deactivate_d_dir, 'see-me.sh') - deactivate_d_2 = join(deactivate_d_dir, 'dont-see-me.bat') + deactivate_d_dir = mkdir_p(join(old_prefix, "etc", "conda", "deactivate.d")) + deactivate_d_1 = join(deactivate_d_dir, "see-me.sh") + deactivate_d_2 = join(deactivate_d_dir, "dont-see-me.bat") touch(join(deactivate_d_1)) touch(join(deactivate_d_2)) - with env_var('CONDA_SHLVL', '1'): - with env_var('CONDA_PREFIX', old_prefix): + with env_var("CONDA_SHLVL", "1"): + with env_var("CONDA_PREFIX", old_prefix): activator = PosixActivator() builder = activator.build_activate(td) - new_path_parts = activator._replace_prefix_in_path(old_prefix, old_prefix) + new_path_parts = activator._replace_prefix_in_path( + old_prefix, old_prefix + ) conda_prompt_modifier = "(%s) " % old_prefix - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") set_vars = {"PS1": ps1} export_vars = { @@ -707,24 +771,28 @@ def test_activate_same_environment(self): "CONDA_SHLVL": 1, "CONDA_PROMPT_MODIFIER": "(%s) " % td, } - assert builder['unset_vars'] == () - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert 
builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == (activator.path_conversion(deactivate_d_1),) + assert builder["unset_vars"] == () + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == ( + activator.path_conversion(deactivate_d_1), + ) @pytest.mark.skipif(bash_unsupported_win(), reason=bash_unsupported_win_because()) def test_build_deactivate_shlvl_2_from_stack(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - deactivate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'deactivate.d')) - deactivate_d_1 = join(deactivate_d_dir, 'see-me-deactivate.sh') - deactivate_d_2 = join(deactivate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + deactivate_d_dir = mkdir_p(join(td, "etc", "conda", "deactivate.d")) + deactivate_d_1 = join(deactivate_d_dir, "see-me-deactivate.sh") + deactivate_d_2 = join(deactivate_d_dir, "dont-see-me.bat") touch(join(deactivate_d_1)) touch(join(deactivate_d_2)) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) activate_pkg_env_vars_a = join(td, PACKAGE_ENV_VARS_DIR) @@ -732,17 +800,18 @@ def test_build_deactivate_shlvl_2_from_stack(self): with open(join(activate_pkg_env_vars_a, "pkg_a.json"), "w") as f: f.write(PKG_A_ENV_VARS) - old_prefix = join(td, 'old') - mkdir_p(join(old_prefix, 'conda-meta')) - activate_d_dir = mkdir_p(join(old_prefix, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me-activate.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + old_prefix = join(td, "old") + mkdir_p(join(old_prefix, "conda-meta")) + activate_d_dir = mkdir_p(join(old_prefix, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me-activate.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) activate_env_vars_old = join(old_prefix, PREFIX_STATE_FILE) - with open(activate_env_vars_old, 'w') as f: - f.write(''' + with open(activate_env_vars_old, "w") as f: + f.write( + """ { "version": 1, "env_vars": { @@ -750,32 +819,40 @@ def test_build_deactivate_shlvl_2_from_stack(self): "ENV_FIVE": "hive" } } - ''') + """ + ) activate_pkg_env_vars_b = join(old_prefix, PACKAGE_ENV_VARS_DIR) mkdir_p(activate_pkg_env_vars_b) with open(join(activate_pkg_env_vars_b, "pkg_b.json"), "w") as f: f.write(PKG_B_ENV_VARS) activator = PosixActivator() - original_path = activator.pathsep_join(activator._add_prefix_to_path(old_prefix)) - with env_var('PATH', original_path): + original_path = activator.pathsep_join( + activator._add_prefix_to_path(old_prefix) + ) + with env_var("PATH", original_path): activator = PosixActivator() - starting_path = activator.pathsep_join(activator._add_prefix_to_path(td)) - - with env_vars({ - 'CONDA_SHLVL': '2', - 'CONDA_PREFIX_1': old_prefix, - 'CONDA_PREFIX': td, - 'CONDA_STACKED_2': 'true', - 'PATH': starting_path, - 'ENV_ONE': 'one', - 'ENV_TWO': 'you', - 'ENV_THREE': 'me', - 'ENV_FOUR': 'roar', - 'ENV_FIVE': 'hive', - 'PKG_A_ENV': 'yerp', - 'PKG_B_ENV': 'berp', - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + starting_path = activator.pathsep_join( + activator._add_prefix_to_path(td) + ) + + with env_vars( + { + "CONDA_SHLVL": "2", + "CONDA_PREFIX_1": old_prefix, + "CONDA_PREFIX": td, + 
"CONDA_STACKED_2": "true", + "PATH": starting_path, + "ENV_ONE": "one", + "ENV_TWO": "you", + "ENV_THREE": "me", + "ENV_FOUR": "roar", + "ENV_FIVE": "hive", + "PKG_A_ENV": "yerp", + "PKG_B_ENV": "berp", + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): activator = PosixActivator() builder = activator.build_deactivate() @@ -790,7 +867,7 @@ def test_build_deactivate_shlvl_2_from_stack(self): ] conda_prompt_modifier = "(%s) " % old_prefix - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") set_vars = {"PS1": ps1} export_vars = { @@ -802,27 +879,35 @@ def test_build_deactivate_shlvl_2_from_stack(self): "ENV_FOUR": "roar", "ENV_FIVE": "hive", } - export_path = {'PATH': original_path,} - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) - assert builder['unset_vars'] == unset_vars - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert builder['export_path'] == export_path - assert builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == (activator.path_conversion(deactivate_d_1),) + export_path = { + "PATH": original_path, + } + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) + assert builder["unset_vars"] == unset_vars + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["export_path"] == export_path + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == ( + activator.path_conversion(deactivate_d_1), + ) @pytest.mark.skipif(bash_unsupported_win(), reason=bash_unsupported_win_because()) def test_build_deactivate_shlvl_2_from_activate(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - deactivate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'deactivate.d')) - deactivate_d_1 = join(deactivate_d_dir, 'see-me-deactivate.sh') - deactivate_d_2 = join(deactivate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + deactivate_d_dir = mkdir_p(join(td, "etc", "conda", "deactivate.d")) + deactivate_d_1 = join(deactivate_d_dir, "see-me-deactivate.sh") + deactivate_d_2 = join(deactivate_d_dir, "dont-see-me.bat") touch(join(deactivate_d_1)) touch(join(deactivate_d_2)) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) activate_pkg_env_vars_a = join(td, PACKAGE_ENV_VARS_DIR) @@ -830,17 +915,18 @@ def test_build_deactivate_shlvl_2_from_activate(self): with open(join(activate_pkg_env_vars_a, "pkg_a.json"), "w") as f: f.write(PKG_A_ENV_VARS) - old_prefix = join(td, 'old') - mkdir_p(join(old_prefix, 'conda-meta')) - activate_d_dir = mkdir_p(join(old_prefix, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me-activate.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + old_prefix = join(td, "old") + mkdir_p(join(old_prefix, "conda-meta")) + activate_d_dir = mkdir_p(join(old_prefix, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me-activate.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) activate_env_vars_old = join(old_prefix, PREFIX_STATE_FILE) - with open(activate_env_vars_old, 'w') as f: - f.write(''' + with open(activate_env_vars_old, "w") as f: + f.write( + """ { "version": 1, "env_vars": { @@ 
-848,26 +934,32 @@ def test_build_deactivate_shlvl_2_from_activate(self): "ENV_FIVE": "hive" } } - ''') + """ + ) activate_pkg_env_vars_b = join(old_prefix, PACKAGE_ENV_VARS_DIR) mkdir_p(activate_pkg_env_vars_b) with open(join(activate_pkg_env_vars_b, "pkg_b.json"), "w") as f: f.write(PKG_B_ENV_VARS) activator = PosixActivator() - original_path = activator.pathsep_join(activator._add_prefix_to_path(old_prefix)) + original_path = activator.pathsep_join( + activator._add_prefix_to_path(old_prefix) + ) new_path = activator.pathsep_join(activator._add_prefix_to_path(td)) - with env_vars({ - 'CONDA_SHLVL': '2', - 'CONDA_PREFIX_1': old_prefix, - 'CONDA_PREFIX': td, - 'PATH': new_path, - 'ENV_ONE': 'one', - 'ENV_TWO': 'you', - 'ENV_THREE': 'me', - 'PKG_A_ENV': 'yerp', - 'PKG_B_ENV': 'berp', - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + { + "CONDA_SHLVL": "2", + "CONDA_PREFIX_1": old_prefix, + "CONDA_PREFIX": td, + "PATH": new_path, + "ENV_ONE": "one", + "ENV_TWO": "you", + "ENV_THREE": "me", + "PKG_A_ENV": "yerp", + "PKG_B_ENV": "berp", + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): activator = PosixActivator() builder = activator.build_deactivate() @@ -881,7 +973,7 @@ def test_build_deactivate_shlvl_2_from_activate(self): ] conda_prompt_modifier = "(%s) " % old_prefix - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") set_vars = {"PS1": ps1} export_vars = { @@ -893,38 +985,48 @@ def test_build_deactivate_shlvl_2_from_activate(self): "ENV_FOUR": "roar", "ENV_FIVE": "hive", } - export_path = {'PATH': original_path,} - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) + export_path = { + "PATH": original_path, + } + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) - assert builder['unset_vars'] == unset_vars - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert builder['export_path'] == export_path - assert builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == (activator.path_conversion(deactivate_d_1),) + assert builder["unset_vars"] == unset_vars + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["export_path"] == export_path + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == ( + activator.path_conversion(deactivate_d_1), + ) def test_build_deactivate_shlvl_1(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - deactivate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'deactivate.d')) - deactivate_d_1 = join(deactivate_d_dir, 'see-me-deactivate.sh') - deactivate_d_2 = join(deactivate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + deactivate_d_dir = mkdir_p(join(td, "etc", "conda", "deactivate.d")) + deactivate_d_1 = join(deactivate_d_dir, "see-me-deactivate.sh") + deactivate_d_2 = join(deactivate_d_dir, "dont-see-me.bat") touch(join(deactivate_d_1)) touch(join(deactivate_d_2)) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) self.write_pkg_env_vars(td) - with env_var('CONDA_SHLVL', '1'): - with env_var('CONDA_PREFIX', td): + with env_var("CONDA_SHLVL", "1"): + with env_var("CONDA_PREFIX", td): activator = PosixActivator() original_path = 
tuple(activator._get_starting_path_list()) builder = activator.build_deactivate() - new_path = activator.pathsep_join(activator.path_conversion(original_path)) + new_path = activator.pathsep_join( + activator.path_conversion(original_path) + ) export_vars, unset_vars = activator.add_export_unset_vars( {"CONDA_SHLVL": 0}, [ @@ -952,27 +1054,31 @@ def test_get_env_vars_big_whitespace(self): with tempdir() as td: STATE_FILE = join(td, PREFIX_STATE_FILE) mkdir_p(dirname(STATE_FILE)) - with open(STATE_FILE, 'w') as f: - f.write(''' + with open(STATE_FILE, "w") as f: + f.write( + """ { "version": 1, "env_vars": { "ENV_ONE": "one", "ENV_TWO": "you", "ENV_THREE": "me" - }}''') + }}""" + ) activator = PosixActivator() env_vars = activator._get_environment_env_vars(td) - assert env_vars == {'ENV_ONE':'one', 'ENV_TWO': 'you','ENV_THREE':'me'} + assert env_vars == {"ENV_ONE": "one", "ENV_TWO": "you", "ENV_THREE": "me"} def test_get_env_vars_empty_file(self): with tempdir() as td: - env_var_parent_dir = join(td, 'conda-meta') + env_var_parent_dir = join(td, "conda-meta") mkdir_p(env_var_parent_dir) - activate_env_vars = join(env_var_parent_dir, 'env_vars') - with open(activate_env_vars, 'w') as f: - f.write(''' - ''') + activate_env_vars = join(env_var_parent_dir, "env_vars") + with open(activate_env_vars, "w") as f: + f.write( + """ + """ + ) activator = PosixActivator() env_vars = activator._get_environment_env_vars(td) assert env_vars == {} @@ -980,36 +1086,41 @@ def test_get_env_vars_empty_file(self): @pytest.mark.skipif(bash_unsupported_win(), reason=bash_unsupported_win_because()) def test_build_activate_restore_unset_env_vars(self): with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - activate_d_dir = mkdir_p(join(td, 'etc', 'conda', 'activate.d')) - activate_d_1 = join(activate_d_dir, 'see-me.sh') - activate_d_2 = join(activate_d_dir, 'dont-see-me.bat') + mkdir_p(join(td, "conda-meta")) + activate_d_dir = mkdir_p(join(td, "etc", "conda", "activate.d")) + activate_d_1 = join(activate_d_dir, "see-me.sh") + activate_d_2 = join(activate_d_dir, "dont-see-me.bat") touch(join(activate_d_1)) touch(join(activate_d_2)) activate_env_vars = join(td, PREFIX_STATE_FILE) - with open(activate_env_vars, 'w') as f: + with open(activate_env_vars, "w") as f: f.write(ENV_VARS_FILE) self.write_pkg_env_vars(td) - old_prefix = '/old/prefix' + old_prefix = "/old/prefix" activator = PosixActivator() old_path = activator.pathsep_join(activator._add_prefix_to_path(old_prefix)) - with env_vars({ - 'CONDA_SHLVL': '1', - 'CONDA_PREFIX': old_prefix, - 'PATH': old_path, - 'CONDA_ENV_PROMPT': '({default_env})', - 'ENV_ONE': 'already_set_env_var', - 'ENV_WITH_SAME_VALUE': 'with_same_value' - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + { + "CONDA_SHLVL": "1", + "CONDA_PREFIX": old_prefix, + "PATH": old_path, + "CONDA_ENV_PROMPT": "({default_env})", + "ENV_ONE": "already_set_env_var", + "ENV_WITH_SAME_VALUE": "with_same_value", + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): activator = PosixActivator() builder = activator.build_activate(td) - new_path = activator.pathsep_join(activator._replace_prefix_in_path(old_prefix, td)) + new_path = activator.pathsep_join( + activator._replace_prefix_in_path(old_prefix, td) + ) conda_prompt_modifier = "(%s)" % td - ps1 = conda_prompt_modifier + os.environ.get('PS1', '') + ps1 = conda_prompt_modifier + os.environ.get("PS1", "") assert activator.path_conversion(td) in new_path assert old_prefix not in new_path @@ -1032,44 +1143,50 @@ def 
test_build_activate_restore_unset_env_vars(self): "__CONDA_SHLVL_1_ENV_ONE": "already_set_env_var", } export_vars, _ = activator.add_export_unset_vars(export_vars, None) - export_vars['CONDA_PREFIX_1'] = old_prefix - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) - - assert builder['unset_vars'] == unset_vars - assert builder['set_vars'] == set_vars - assert builder['export_vars'] == export_vars - assert builder['activate_scripts'] == (activator.path_conversion(activate_d_1),) - assert builder['deactivate_scripts'] == () - - with env_vars({ - 'PATH': new_path, - 'CONDA_PREFIX': td, - 'CONDA_PREFIX_1': old_prefix, - 'CONDA_SHLVL': 2, - 'CONDA_DEFAULT_ENV': td, - 'CONDA_PROMPT_MODIFIER': conda_prompt_modifier, - '__CONDA_SHLVL_1_ENV_ONE': 'already_set_env_var', - 'PKG_B_ENV': 'berp', - 'PKG_A_ENV': 'yerp', - 'ENV_ONE': 'one', - 'ENV_TWO': 'you', - 'ENV_THREE': 'me', - 'ENV_WITH_SAME_VALUE': 'with_same_value' - }): + export_vars["CONDA_PREFIX_1"] = old_prefix + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) + + assert builder["unset_vars"] == unset_vars + assert builder["set_vars"] == set_vars + assert builder["export_vars"] == export_vars + assert builder["activate_scripts"] == ( + activator.path_conversion(activate_d_1), + ) + assert builder["deactivate_scripts"] == () + + with env_vars( + { + "PATH": new_path, + "CONDA_PREFIX": td, + "CONDA_PREFIX_1": old_prefix, + "CONDA_SHLVL": 2, + "CONDA_DEFAULT_ENV": td, + "CONDA_PROMPT_MODIFIER": conda_prompt_modifier, + "__CONDA_SHLVL_1_ENV_ONE": "already_set_env_var", + "PKG_B_ENV": "berp", + "PKG_A_ENV": "yerp", + "ENV_ONE": "one", + "ENV_TWO": "you", + "ENV_THREE": "me", + "ENV_WITH_SAME_VALUE": "with_same_value", + } + ): activator = PosixActivator() builder = activator.build_deactivate() unset_vars = [ - 'CONDA_PREFIX_1', - 'PKG_A_ENV', - 'PKG_B_ENV', - 'ENV_ONE', - 'ENV_TWO', - 'ENV_THREE', - "ENV_WITH_SAME_VALUE" + "CONDA_PREFIX_1", + "PKG_A_ENV", + "PKG_B_ENV", + "ENV_ONE", + "ENV_TWO", + "ENV_THREE", + "ENV_WITH_SAME_VALUE", ] - assert builder['set_vars'] == { - 'PS1': '(/old/prefix)', + assert builder["set_vars"] == { + "PS1": "(/old/prefix)", } export_vars = { "CONDA_PREFIX": old_prefix, @@ -1077,26 +1194,29 @@ def test_build_activate_restore_unset_env_vars(self): "CONDA_DEFAULT_ENV": old_prefix, "CONDA_PROMPT_MODIFIER": f"({old_prefix})", } - export_path = {'PATH': old_path, } - export_vars, unset_vars = activator.add_export_unset_vars(export_vars, unset_vars) - export_vars['ENV_ONE'] = 'already_set_env_var' - assert builder['unset_vars'] == unset_vars - assert builder['export_vars'] == export_vars - assert builder['export_path'] == export_path - assert builder['activate_scripts'] == () - assert builder['deactivate_scripts'] == () + export_path = { + "PATH": old_path, + } + export_vars, unset_vars = activator.add_export_unset_vars( + export_vars, unset_vars + ) + export_vars["ENV_ONE"] = "already_set_env_var" + assert builder["unset_vars"] == unset_vars + assert builder["export_vars"] == export_vars + assert builder["export_path"] == export_path + assert builder["activate_scripts"] == () + assert builder["deactivate_scripts"] == () class ShellWrapperUnitTests(TestCase): - def setUp(self): tempdirdir = gettempdir() prefix_dirname = str(uuid4())[:4] + SPACER_CHARACTER + str(uuid4())[:4] self.prefix = join(tempdirdir, prefix_dirname) - mkdir_p(join(self.prefix, 'conda-meta')) + mkdir_p(join(self.prefix, "conda-meta")) assert isdir(self.prefix) - 
touch(join(self.prefix, 'conda-meta', 'history')) + touch(join(self.prefix, "conda-meta", "history")) self.hold_environ = os.environ.copy() for var in POP_THESE: @@ -1108,24 +1228,26 @@ def tearDown(self): os.environ.update(self.hold_environ) def make_dot_d_files(self, extension): - mkdir_p(join(self.prefix, 'etc', 'conda', 'activate.d')) - mkdir_p(join(self.prefix, 'etc', 'conda', 'deactivate.d')) + mkdir_p(join(self.prefix, "etc", "conda", "activate.d")) + mkdir_p(join(self.prefix, "etc", "conda", "deactivate.d")) - touch(join(self.prefix, 'etc', 'conda', 'activate.d', 'ignore.txt')) - touch(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'ignore.txt')) + touch(join(self.prefix, "etc", "conda", "activate.d", "ignore.txt")) + touch(join(self.prefix, "etc", "conda", "deactivate.d", "ignore.txt")) - touch(join(self.prefix, 'etc', 'conda', 'activate.d', 'activate1' + extension)) - touch(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1' + extension)) + touch(join(self.prefix, "etc", "conda", "activate.d", "activate1" + extension)) + touch( + join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1" + extension) + ) def test_native_path_to_unix(self): def assert_unix_path(path): - assert '\\' not in path, path - assert ':' not in path, path + assert "\\" not in path, path + assert ":" not in path, path return True - path1 = join(self.prefix, 'path', 'number', 'one') - path2 = join(self.prefix, 'path', 'two') - path3 = join(self.prefix, 'three') + path1 = join(self.prefix, "path", "number", "one") + path2 = join(self.prefix, "path", "two") + path3 = join(self.prefix, "three") paths = (path1, path2, path3) if on_win: @@ -1212,7 +1334,13 @@ def test_posix_basic(self): join(self.prefix, "etc", "conda", "activate.d", "activate1.sh") ), "deactivate1": activator.path_conversion( - join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.sh") + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.sh", + ) ), "native_prefix": self.prefix, "new_path": activator.pathsep_join(new_path_parts), @@ -1227,10 +1355,17 @@ def test_posix_basic(self): assert rc == 0 deactivate_data = c.stdout - new_path = activator.pathsep_join(activator._remove_prefix_from_path(self.prefix)) - conda_exe_export, conda_exe_unset = activator.get_scripts_export_unset_vars() + new_path = activator.pathsep_join( + activator._remove_prefix_from_path(self.prefix) + ) + ( + conda_exe_export, + conda_exe_unset, + ) = activator.get_scripts_export_unset_vars() - e_deactivate_data = dals(""" + e_deactivate_data = ( + dals( + """ export PATH='%(new_path)s' . 
"%(deactivate1)s" %(conda_exe_unset)s @@ -1240,14 +1375,25 @@ def test_posix_basic(self): PS1='%(ps1)s' export CONDA_SHLVL='0' %(conda_exe_export)s - """) % { - 'new_path': new_path, - 'deactivate1': activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.sh')), - 'ps1': os.environ.get('PS1', ''), - 'conda_exe_unset': conda_exe_unset, - 'conda_exe_export': conda_exe_export, - } - assert deactivate_data == re.sub(r'\n\n+', '\n', e_deactivate_data) + """ + ) + % { + "new_path": new_path, + "deactivate1": activator.path_conversion( + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.sh", + ) + ), + "ps1": os.environ.get("PS1", ""), + "conda_exe_unset": conda_exe_unset, + "conda_exe_export": conda_exe_export, + } + ) + assert deactivate_data == re.sub(r"\n\n+", "\n", e_deactivate_data) @pytest.mark.skipif(not on_win, reason="cmd.exe only on Windows") def test_cmd_exe_basic(self): @@ -1328,7 +1474,13 @@ def test_cmd_exe_basic(self): join(self.prefix, "etc", "conda", "activate.d", "activate1.bat") ), "deactivate1": activator.path_conversion( - join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.bat") + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.bat", + ) ), "native_prefix": self.prefix, "new_path": activator.pathsep_join(new_path_parts), @@ -1344,8 +1496,12 @@ def test_cmd_exe_basic(self): deactivate_data = fh.read() rm_rf(deactivate_result) - new_path = activator.pathsep_join(activator._remove_prefix_from_path(self.prefix)) - e_deactivate_data = dals(""" + new_path = activator.pathsep_join( + activator._remove_prefix_from_path(self.prefix) + ) + e_deactivate_data = ( + dals( + """ @SET "PATH=%(new_path)s" @CALL "%(deactivate1)s" @SET CONDA_PREFIX= @@ -1353,11 +1509,22 @@ def test_cmd_exe_basic(self): @SET CONDA_PROMPT_MODIFIER= @SET "CONDA_SHLVL=0" %(conda_exe_export)s - """) % { - 'new_path': new_path, - 'deactivate1': activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.bat')), - 'conda_exe_export': conda_exe_export, - } + """ + ) + % { + "new_path": new_path, + "deactivate1": activator.path_conversion( + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.bat", + ) + ), + "conda_exe_export": conda_exe_export, + } + ) assert deactivate_data == e_deactivate_data def test_csh_basic(self): @@ -1433,7 +1600,13 @@ def test_csh_basic(self): join(self.prefix, "etc", "conda", "activate.d", "activate1.csh") ), "deactivate1": activator.path_conversion( - join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.csh") + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.csh", + ) ), "native_prefix": self.prefix, } @@ -1445,11 +1618,18 @@ def test_csh_basic(self): assert rc == 0 deactivate_data = c.stdout - new_path = activator.pathsep_join(activator._remove_prefix_from_path(self.prefix)) + new_path = activator.pathsep_join( + activator._remove_prefix_from_path(self.prefix) + ) - conda_exe_export, conda_exe_unset = activator.get_scripts_export_unset_vars() + ( + conda_exe_export, + conda_exe_unset, + ) = activator.get_scripts_export_unset_vars() - e_deactivate_data = dals(""" + e_deactivate_data = ( + dals( + """ setenv PATH "%(new_path)s"; source "%(deactivate1)s"; unsetenv CONDA_PREFIX; @@ -1458,12 +1638,23 @@ def test_csh_basic(self): set prompt='%(prompt)s'; setenv CONDA_SHLVL "0"; %(conda_exe_export)s; - """) % { - 'new_path': new_path, - 'deactivate1': activator.path_conversion(join(self.prefix, 'etc', 'conda', 
'deactivate.d', 'deactivate1.csh')), - 'prompt': os.environ.get('prompt', ''), - 'conda_exe_export': conda_exe_export, - } + """ + ) + % { + "new_path": new_path, + "deactivate1": activator.path_conversion( + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.csh", + ) + ), + "prompt": os.environ.get("prompt", ""), + "conda_exe_export": conda_exe_export, + } + ) assert deactivate_data == e_deactivate_data def test_xonsh_basic(self): @@ -1478,7 +1669,8 @@ def test_xonsh_basic(self): new_path_parts = activator._add_prefix_to_path(self.prefix) conda_exe_export, conda_exe_unset = activator.get_scripts_export_unset_vars() - e_activate_template = dals(""" + e_activate_template = dals( + """ $PATH = '%(new_path)s' $CONDA_PREFIX = '%(native_prefix)s' $CONDA_SHLVL = '1' @@ -1486,7 +1678,8 @@ def test_xonsh_basic(self): $CONDA_PROMPT_MODIFIER = '(%(native_prefix)s) ' %(conda_exe_export)s %(sourcer)s "%(activate1)s" - """) + """ + ) e_activate_info = { "converted_prefix": activator.path_conversion(self.prefix), "native_prefix": self.prefix, @@ -1532,17 +1725,25 @@ def test_xonsh_basic(self): """ ) e_reactivate_info = { - 'new_path': activator.pathsep_join(new_path_parts), - 'native_prefix': self.prefix, + "new_path": activator.pathsep_join(new_path_parts), + "native_prefix": self.prefix, } if on_win: - e_reactivate_info['sourcer'] = 'source-cmd --suppress-skip-message' - e_reactivate_info['activate1'] = activator.path_conversion(join(self.prefix, 'etc', 'conda', 'activate.d', 'activate1.bat')) - e_reactivate_info['deactivate1'] = activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.bat')) + e_reactivate_info["sourcer"] = "source-cmd --suppress-skip-message" + e_reactivate_info["activate1"] = activator.path_conversion( + join(self.prefix, "etc", "conda", "activate.d", "activate1.bat") + ) + e_reactivate_info["deactivate1"] = activator.path_conversion( + join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.bat") + ) else: - e_reactivate_info['sourcer'] = 'source-bash --suppress-skip-message' - e_reactivate_info['activate1'] = activator.path_conversion(join(self.prefix, 'etc', 'conda', 'activate.d', 'activate1.sh')) - e_reactivate_info['deactivate1'] = activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.sh')) + e_reactivate_info["sourcer"] = "source-bash --suppress-skip-message" + e_reactivate_info["activate1"] = activator.path_conversion( + join(self.prefix, "etc", "conda", "activate.d", "activate1.sh") + ) + e_reactivate_info["deactivate1"] = activator.path_conversion( + join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.sh") + ) e_reactivate_data = e_reactivate_template % e_reactivate_info assert reactivate_data == e_reactivate_data @@ -1552,9 +1753,15 @@ def test_xonsh_basic(self): assert rc == 0 deactivate_data = c.stdout - new_path = activator.pathsep_join(activator._remove_prefix_from_path(self.prefix)) - conda_exe_export, conda_exe_unset = activator.get_scripts_export_unset_vars() - e_deactivate_template = dals(""" + new_path = activator.pathsep_join( + activator._remove_prefix_from_path(self.prefix) + ) + ( + conda_exe_export, + conda_exe_unset, + ) = activator.get_scripts_export_unset_vars() + e_deactivate_template = dals( + """ $PATH = '%(new_path)s' %(sourcer)s "%(deactivate1)s" del $CONDA_PREFIX @@ -1562,17 +1769,22 @@ def test_xonsh_basic(self): del $CONDA_PROMPT_MODIFIER $CONDA_SHLVL = '0' %(conda_exe_export)s - """) + """ + ) e_deactivate_info = { - 'new_path': new_path, 
- 'conda_exe_export': conda_exe_export, + "new_path": new_path, + "conda_exe_export": conda_exe_export, } if on_win: - e_deactivate_info['sourcer'] = 'source-cmd --suppress-skip-message' - e_deactivate_info['deactivate1'] = activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.bat')) + e_deactivate_info["sourcer"] = "source-cmd --suppress-skip-message" + e_deactivate_info["deactivate1"] = activator.path_conversion( + join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.bat") + ) else: - e_deactivate_info['sourcer'] = 'source-bash --suppress-skip-message' - e_deactivate_info['deactivate1'] = activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.sh')) + e_deactivate_info["sourcer"] = "source-bash --suppress-skip-message" + e_deactivate_info["deactivate1"] = activator.path_conversion( + join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.sh") + ) e_deactivate_data = e_deactivate_template % e_deactivate_info assert deactivate_data == e_deactivate_data @@ -1641,10 +1853,18 @@ def test_fish_basic(self): % { "new_path": activator.pathsep_join(new_path_parts), "activate1": activator.path_conversion( - join(self.prefix, "etc", "conda", "activate.d", "activate1.fish") + join( + self.prefix, "etc", "conda", "activate.d", "activate1.fish" + ) ), "deactivate1": activator.path_conversion( - join(self.prefix, "etc", "conda", "deactivate.d", "deactivate1.fish") + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.fish", + ) ), "native_prefix": self.prefix, } @@ -1657,9 +1877,16 @@ def test_fish_basic(self): assert rc == 0 deactivate_data = c.stdout - new_path = activator.pathsep_join(activator._remove_prefix_from_path(self.prefix)) - conda_exe_export, conda_exe_unset = activator.get_scripts_export_unset_vars() - e_deactivate_data = dals(""" + new_path = activator.pathsep_join( + activator._remove_prefix_from_path(self.prefix) + ) + ( + conda_exe_export, + conda_exe_unset, + ) = activator.get_scripts_export_unset_vars() + e_deactivate_data = ( + dals( + """ set -gx PATH "%(new_path)s"; source "%(deactivate1)s"; set -e CONDA_PREFIX; @@ -1667,11 +1894,22 @@ def test_fish_basic(self): set -e CONDA_PROMPT_MODIFIER; set -gx CONDA_SHLVL "0"; %(conda_exe_export)s; - """) % { - 'new_path': new_path, - 'deactivate1': activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.fish')), - 'conda_exe_export': conda_exe_export, - } + """ + ) + % { + "new_path": new_path, + "deactivate1": activator.path_conversion( + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.fish", + ) + ), + "conda_exe_export": conda_exe_export, + } + ) assert deactivate_data == e_deactivate_data def test_powershell_basic(self): @@ -1702,7 +1940,9 @@ def test_powershell_basic(self): "prefix": self.prefix, "new_path": activator.pathsep_join(new_path_parts), "sys_executable": sys.executable, - "activate1": join(self.prefix, "etc", "conda", "activate.d", "activate1.ps1"), + "activate1": join( + self.prefix, "etc", "conda", "activate.d", "activate1.ps1" + ), "conda_exe_export": conda_exe_export, } ) @@ -1735,7 +1975,9 @@ def test_powershell_basic(self): """ ) % { - "activate1": join(self.prefix, "etc", "conda", "activate.d", "activate1.ps1"), + "activate1": join( + self.prefix, "etc", "conda", "activate.d", "activate1.ps1" + ), "deactivate1": join( self.prefix, "etc", "conda", "deactivate.d", "deactivate1.ps1" ), @@ -1750,9 +1992,14 @@ def test_powershell_basic(self): assert rc == 0 
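
Aside: every expected-output template being re-wrapped in these shell tests is built with dals ("dedent and lstrip") followed by printf-style % substitution. A short, self-contained illustration of the idiom — a sketch only; the tests import conda's own dals helper rather than defining it inline:

    from textwrap import dedent

    def dals(string):
        """Dedent a triple-quoted block and strip the leading newline."""
        return dedent(string).lstrip()

    expected = dals(
        """
        $Env:PATH = "%(new_path)s"
        $Env:CONDA_SHLVL = "0"
        """
    ) % {"new_path": "/tmp/prefix/bin"}
    # `expected` now carries no leading blank line and no common indentation,
    # so it can be compared byte-for-byte against the activator's output.

This is why black's re-indentation of the dals(...) % {...} expressions in these hunks is behavior-preserving: the indentation added inside the triple-quoted literals is exactly the common prefix dedent removes.
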
deactivate_data = c.stdout - new_path = activator.pathsep_join(activator._remove_prefix_from_path(self.prefix)) + new_path = activator.pathsep_join( + activator._remove_prefix_from_path(self.prefix) + ) - assert deactivate_data == dals(""" + assert ( + deactivate_data + == dals( + """ $Env:PATH = "%(new_path)s" . "%(deactivate1)s" $Env:CONDA_PREFIX = "" @@ -1760,11 +2007,16 @@ def test_powershell_basic(self): $Env:CONDA_PROMPT_MODIFIER = "" $Env:CONDA_SHLVL = "0" %(conda_exe_export)s - """) % { - 'new_path': new_path, - 'deactivate1': join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.ps1'), - 'conda_exe_export': conda_exe_export, - } + """ + ) + % { + "new_path": new_path, + "deactivate1": join( + self.prefix, "etc", "conda", "deactivate.d", "deactivate1.ps1" + ), + "conda_exe_export": conda_exe_export, + } + ) def test_unicode(self): shell = "shell.posix" @@ -1778,7 +2030,7 @@ def test_unicode(self): main_sourced(shell, *activate_args, self.prefix) def test_json_basic(self): - activator = _build_activator_cls('posix+json')() + activator = _build_activator_cls("posix+json")() self.make_dot_d_files(activator.script_extension) with captured() as c: @@ -1799,7 +2051,7 @@ def test_json_basic(self): CONDA_SHLVL=1, CONDA_DEFAULT_ENV=self.prefix, CONDA_PROMPT_MODIFIER="(%s) " % self.prefix, - **conda_exe_export + **conda_exe_export, ), "set": { "PS1": "(%s) " % self.prefix, @@ -1847,9 +2099,29 @@ def test_json_basic(self): "unset": [], }, "scripts": { - "activate": [activator.path_conversion(join(self.prefix, 'etc', 'conda', 'activate.d', 'activate1.sh')),], - "deactivate": [activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.sh')),], - } + "activate": [ + activator.path_conversion( + join( + self.prefix, + "etc", + "conda", + "activate.d", + "activate1.sh", + ) + ), + ], + "deactivate": [ + activator.path_conversion( + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.sh", + ) + ), + ], + }, } assert json.loads(reactivate_data) == e_reactivate_data @@ -1859,8 +2131,13 @@ def test_json_basic(self): assert rc == 0 deactivate_data = c.stdout - new_path = activator.pathsep_join(activator._remove_prefix_from_path(self.prefix)) - conda_exe_export, conda_exe_unset = activator.get_scripts_export_unset_vars() + new_path = activator.pathsep_join( + activator._remove_prefix_from_path(self.prefix) + ) + ( + conda_exe_export, + conda_exe_unset, + ) = activator.get_scripts_export_unset_vars() e_deactivate_data = { "path": { "PATH": list(new_path), @@ -1868,14 +2145,28 @@ def test_json_basic(self): "vars": { "export": dict(CONDA_SHLVL=0, **conda_exe_export), "set": { - "PS1": '', + "PS1": "", }, - "unset": ['CONDA_PREFIX', 'CONDA_DEFAULT_ENV', 'CONDA_PROMPT_MODIFIER'], + "unset": [ + "CONDA_PREFIX", + "CONDA_DEFAULT_ENV", + "CONDA_PROMPT_MODIFIER", + ], }, "scripts": { "activate": [], - "deactivate": [activator.path_conversion(join(self.prefix, 'etc', 'conda', 'deactivate.d', 'deactivate1.sh')),], - } + "deactivate": [ + activator.path_conversion( + join( + self.prefix, + "etc", + "conda", + "deactivate.d", + "deactivate1.sh", + ) + ), + ], + }, } assert json.loads(deactivate_data) == e_deactivate_data @@ -1886,26 +2177,30 @@ class InteractiveShell: print_env_var = None from conda.utils import quote_for_shell - exe_quoted = quote_for_shell(sys.executable.replace("\\", "/") if on_win else sys.executable) + exe_quoted = quote_for_shell( + sys.executable.replace("\\", "/") if on_win else sys.executable + ) shells = { - 'posix': { - 
'activator': 'posix', + "posix": { + "activator": "posix", # 'init_command': 'env | sort && mount && which {0} && {0} -V && echo "$({0} -m conda shell.posix hook)" && eval "$({0} -m conda shell.posix hook)"'.format('/c/Users/rdonnelly/mc/python.exe'), # sys.executable.replace('\\', '/')), # 'init_command': 'env | sort && echo "$({0} -m conda shell.posix hook)" && eval "$({0} -m conda shell.posix hook)"'.format(self. # '/c/Users/rdonnelly/mc/python.exe'), # sys.executable.replace('\\', '/')), - 'init_command': ('env | sort && echo "$({0} -m conda shell.posix hook {1})" && ' - 'eval "$({0} -m conda shell.posix hook {1})" && env | sort' - .format(exe_quoted, dev_arg)), - - 'print_env_var': 'echo "$%s"', + "init_command": ( + 'env | sort && echo "$({0} -m conda shell.posix hook {1})" && ' + 'eval "$({0} -m conda shell.posix hook {1})" && env | sort'.format( + exe_quoted, dev_arg + ) + ), + "print_env_var": 'echo "$%s"', }, - 'bash': { + "bash": { # MSYS2's login scripts handle mounting the filesystem. Without it, /c is /cygdrive. "args": ("-l",) if on_win else (), "base_shell": "posix", # inheritance implemented in __init__ }, - 'dash': { - 'base_shell': 'posix', # inheritance implemented in __init__ + "dash": { + "base_shell": "posix", # inheritance implemented in __init__ }, "zsh": { "base_shell": "posix", # inheritance implemented in __init__ @@ -1916,41 +2211,41 @@ class InteractiveShell: # It should be noted here that we use the latest hook with whatever conda.exe is installed # in sys.prefix (and we will activate all of those PATH entries). We will set PYTHONPATH # though, so there is that. What I'm getting at is that this is a huge mixup and a mess. - 'cmd.exe': { - 'activator': 'cmd.exe', - -# For non-dev-mode you'd have: -# 'init_command': 'set "CONDA_SHLVL=" ' -# '&& @CALL {}\\shell\\condabin\\conda_hook.bat {} ' -# '&& set CONDA_EXE={}' -# '&& set _CE_M=' -# '&& set _CE_CONDA=' -# .format(CONDA_PACKAGE_ROOT, dev_arg, -# join(sys.prefix, "Scripts", "conda.exe")), - - 'init_command': 'set "CONDA_SHLVL=" ' - '&& @CALL {}\\shell\\condabin\\conda_hook.bat {}' - '&& set CONDA_EXE={}' - '&& set _CE_M=-m' - '&& set _CE_CONDA=conda'.format(CONDA_PACKAGE_ROOT, dev_arg, - sys.executable), - - 'print_env_var': '@echo %%%s%%', + "cmd.exe": { + "activator": "cmd.exe", + # For non-dev-mode you'd have: + # 'init_command': 'set "CONDA_SHLVL=" ' + # '&& @CALL {}\\shell\\condabin\\conda_hook.bat {} ' + # '&& set CONDA_EXE={}' + # '&& set _CE_M=' + # '&& set _CE_CONDA=' + # .format(CONDA_PACKAGE_ROOT, dev_arg, + # join(sys.prefix, "Scripts", "conda.exe")), + "init_command": 'set "CONDA_SHLVL=" ' + "&& @CALL {}\\shell\\condabin\\conda_hook.bat {}" + "&& set CONDA_EXE={}" + "&& set _CE_M=-m" + "&& set _CE_CONDA=conda".format( + CONDA_PACKAGE_ROOT, dev_arg, sys.executable + ), + "print_env_var": "@echo %%%s%%", }, - 'csh': { - 'activator': 'csh', + "csh": { + "activator": "csh", # Trying to use -x with `tcsh` on `macOS` results in some problems: # This error from `PyCharm`: # BrokenPipeError: [Errno 32] Broken pipe (writing to self.proc.stdin). # .. and this one from the `macOS` terminal: # pexpect.exceptions.EOF: End Of File (EOF). 
# 'args': ('-x',), - 'init_command': 'set _CONDA_EXE=\"{CPR}/shell/bin/conda\"; ' - 'source {CPR}/shell/etc/profile.d/conda.csh; '.format(CPR=CONDA_PACKAGE_ROOT), - 'print_env_var': 'echo "$%s"', + "init_command": 'set _CONDA_EXE="{CPR}/shell/bin/conda"; ' + "source {CPR}/shell/etc/profile.d/conda.csh; ".format( + CPR=CONDA_PACKAGE_ROOT + ), + "print_env_var": 'echo "$%s"', }, - 'tcsh': { - 'base_shell': 'csh', + "tcsh": { + "base_shell": "csh", }, "fish": { "activator": "fish", @@ -1959,31 +2254,28 @@ class InteractiveShell: }, # We don't know if the PowerShell executable is called # powershell, pwsh, or pwsh-preview. - 'powershell': { - 'activator': 'powershell', - 'args': ('-NoProfile', '-NoLogo'), - 'init_command': '{} -m conda shell.powershell hook --dev | Out-String | Invoke-Expression'\ - .format(sys.executable), - 'print_env_var': '$Env:%s', - 'exit_cmd': 'exit' - }, - 'pwsh': { - 'base_shell': 'powershell' - }, - 'pwsh-preview': { - 'base_shell': 'powershell' + "powershell": { + "activator": "powershell", + "args": ("-NoProfile", "-NoLogo"), + "init_command": "{} -m conda shell.powershell hook --dev | Out-String | Invoke-Expression".format( + sys.executable + ), + "print_env_var": "$Env:%s", + "exit_cmd": "exit", }, + "pwsh": {"base_shell": "powershell"}, + "pwsh-preview": {"base_shell": "powershell"}, } def __init__(self, shell_name): self.shell_name = shell_name - base_shell = self.shells[shell_name].get('base_shell') + base_shell = self.shells[shell_name].get("base_shell") shell_vals = self.shells.get(base_shell, {}).copy() shell_vals.update(self.shells[shell_name]) for key, value in shell_vals.items(): setattr(self, key, value) - self.activator = activator_map[shell_vals['activator']]() - self.exit_cmd = self.shells[shell_name].get('exit_cmd', None) + self.activator = activator_map[shell_vals["activator"]]() + self.exit_cmd = self.shells[shell_name].get("exit_cmd", None) def __enter__(self): from pexpect.popen_spawn import PopenSpawn @@ -1993,10 +2285,11 @@ def __enter__(self): os.environ["MSYS2_PATH_TYPE"] = "inherit" os.environ["CHERE_INVOKING"] = "1" env = {str(k): str(v) for k, v in os.environ.items()} - remove_these = {var_name for var_name in env if var_name.startswith('CONDA_')} + remove_these = {var_name for var_name in env if var_name.startswith("CONDA_")} for var_name in remove_these: del env[var_name] from conda.utils import quote_for_shell + # 1. shell='cmd.exe' is deliberate. We are not, at this time, running bash, we # are launching it (from `cmd.exe` most likely). # 2. 
For some reason, passing just self.shell_name (which is `bash`) runs WSL @@ -2017,7 +2310,11 @@ def __enter__(self): ) # set state for context - joiner = os.pathsep.join if self.shell_name == "fish" else self.activator.pathsep_join + joiner = ( + os.pathsep.join + if self.shell_name == "fish" + else self.activator.pathsep_join + ) PATH = joiner( self.activator.path_conversion( ( @@ -2055,6 +2352,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): if self.exit_cmd: self.sendline(self.exit_cmd) import signal + self.p.kill(signal.SIGINT) def sendline(self, s): @@ -2069,20 +2367,20 @@ def assert_env_var(self, env_var, value, use_exact=False): try: if use_exact: self.p.expect_exact(value) - self.expect('.*\n') + self.expect(".*\n") else: - self.expect('%s\n' % value) + self.expect("%s\n" % value) except: print(self.p.before) print(self.p.after) raise def get_env_var(self, env_var, default=None): - if self.shell_name == 'cmd.exe': + if self.shell_name == "cmd.exe": self.sendline("@echo %%%s%%" % env_var) self.expect("@echo %%%s%%\r\n([^\r]*)\r" % env_var) value = self.p.match.groups()[0] - elif self.shell_name in ('powershell', 'pwsh'): + elif self.shell_name in ("powershell", "pwsh"): self.sendline(self.print_env_var % env_var) if platform.system() == "Windows": # The \r\n\( is the newline after the env var and the start of the prompt. @@ -2093,16 +2391,17 @@ def get_env_var(self, env_var, default=None): self.expect(f"\\$Env:{env_var}\n") value = self.p.readline() else: - self.sendline('echo get_var_start') + self.sendline("echo get_var_start") self.sendline(self.print_env_var % env_var) - self.sendline('echo get_var_end') - self.expect('get_var_start\n') + self.sendline("echo get_var_end") + self.expect("get_var_start\n") value = self.p.readline() - self.expect('get_var_end') + self.expect("get_var_end") if value is None: return default return ensure_text_type(value).strip() + def which_powershell(): r""" Since we don't know whether PowerShell is installed as powershell, pwsh, or pwsh-preview, @@ -2114,26 +2413,26 @@ def which_powershell(): E.g.: ('pwsh', r'C:\Program Files\PowerShell\6.0.2\pwsh.exe) """ if on_win: - posh = which('powershell.exe') + posh = which("powershell.exe") if posh: - return 'powershell', posh + return "powershell", posh - posh = which('pwsh') + posh = which("pwsh") if posh: - return 'pwsh', posh + return "pwsh", posh - posh = which('pwsh-preview') + posh = which("pwsh-preview") if posh: - return 'pwsh-preview', posh + return "pwsh-preview", posh + @pytest.mark.integration class ShellWrapperIntegrationTests(TestCase): - @classmethod def setUpClass(cls): try: - mkdir_p(join(sys.prefix, 'conda-meta')) - touch(join(sys.prefix, 'conda-meta', 'history')) + mkdir_p(join(sys.prefix, "conda-meta")) + touch(join(sys.prefix, "conda-meta", "history")) except Exception: pass @@ -2142,39 +2441,39 @@ def setUp(self): prefix_dirname = str(uuid4())[:4] + SPACER_CHARACTER + str(uuid4())[:4] self.prefix = join(tempdirdir, prefix_dirname) - mkdir_p(join(self.prefix, 'conda-meta')) + mkdir_p(join(self.prefix, "conda-meta")) assert isdir(self.prefix) - touch(join(self.prefix, 'conda-meta', 'history')) + touch(join(self.prefix, "conda-meta", "history")) - self.prefix2 = join(self.prefix, 'envs', 'charizard') - mkdir_p(join(self.prefix2, 'conda-meta')) - touch(join(self.prefix2, 'conda-meta', 'history')) + self.prefix2 = join(self.prefix, "envs", "charizard") + mkdir_p(join(self.prefix2, "conda-meta")) + touch(join(self.prefix2, "conda-meta", "history")) - self.prefix3 = join(self.prefix, 
'envs', 'venusaur') - mkdir_p(join(self.prefix3, 'conda-meta')) - touch(join(self.prefix3, 'conda-meta', 'history')) + self.prefix3 = join(self.prefix, "envs", "venusaur") + mkdir_p(join(self.prefix3, "conda-meta")) + touch(join(self.prefix3, "conda-meta", "history")) # We can engineer ourselves out of having `git` on PATH if we install # it via conda, so, when we have no git on PATH, install this. Yes it # is variable, but at least it is not slow. - if not which('git') or which('git').startswith(sys.prefix): - log.warning("Installing `git` into {} because during these tests" - "`conda` uses `git` to get its version, and the git" - "found on `PATH` on this system seems to be part of" - "a conda env. They stack envs which means that the" - "the original sys.prefix conda env falls off of it." - .format(sys.prefix)) + if not which("git") or which("git").startswith(sys.prefix): + log.warning( + "Installing `git` into {} because during these tests " + "`conda` uses `git` to get its version, and the git " + "found on `PATH` on this system seems to be part of " + "a conda env. They stack envs, which means that " + "the original sys.prefix conda env falls off of it.".format(sys.prefix) + ) run_command(Commands.INSTALL, self.prefix3, "git") def tearDown(self): rm_rf(self.prefix) def basic_posix(self, shell): - if shell.shell_name in ("zsh", "dash"): conda_is_a_function = "conda is a shell function" else: - conda_is_a_function = 'conda is a function' + conda_is_a_function = "conda is a function" activate = f" activate {dev_arg} " deactivate = f" deactivate {dev_arg} " @@ -2186,113 +2485,131 @@ def basic_posix(self, shell): prefix2_p = activator.path_conversion(self.prefix2) activator.path_conversion(self.prefix3) - PATH0 = shell.get_env_var('PATH', '') + PATH0 = shell.get_env_var("PATH", "") assert any(p.endswith("condabin") for p in PATH0.split(":")) # calling bash -l, as we do for MSYS2, may cause conda activation. - shell.sendline('conda deactivate') - shell.sendline('conda deactivate') - shell.sendline('conda deactivate') - shell.sendline('conda deactivate') - shell.expect('.*\n') - - shell.assert_env_var('CONDA_SHLVL', '0') - PATH0 = shell.get_env_var('PATH', '') + shell.sendline("conda deactivate") + shell.sendline("conda deactivate") + shell.sendline("conda deactivate") + shell.sendline("conda deactivate") + shell.expect(".*\n") + + shell.assert_env_var("CONDA_SHLVL", "0") + PATH0 = shell.get_env_var("PATH", "") assert len([p for p in PATH0.split(":") if p.endswith("condabin")]) > 0 # Remove sys.prefix from PATH. It interferes with path entry count tests. # We can no longer check this since we'll replace e.g. between 1 and N path # entries with N of them in _replace_prefix_in_path() now. It is debatable # whether it should be here at all too.
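# A small sketch, not part of this diff, of the PATH-trimming slice used just
# below: if PATH begins with the converted sys.prefix, drop that entry plus its
# separator so the later entry-count assertions are not skewed. The helper name
# and sample values are illustrative, not from the test run.
def drop_leading_entry(path, entry, sep=":"):
    if path.startswith(entry + sep):
        return path[len(entry) + 1 :]
    return path

assert drop_leading_entry("/opt/conda/bin:/usr/bin", "/opt/conda/bin") == "/usr/bin"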
- if PATH0.startswith(activator.path_conversion(sys.prefix) + ':'): - PATH0=PATH0[len(activator.path_conversion(sys.prefix))+1:] + if PATH0.startswith(activator.path_conversion(sys.prefix) + ":"): + PATH0 = PATH0[len(activator.path_conversion(sys.prefix)) + 1 :] shell.sendline(f'export PATH="{PATH0}"') - PATH0 = shell.get_env_var('PATH', '') + PATH0 = shell.get_env_var("PATH", "") shell.sendline("type conda") shell.expect(conda_is_a_function) - _CE_M = shell.get_env_var('_CE_M') - _CE_CONDA = shell.get_env_var('_CE_CONDA') + _CE_M = shell.get_env_var("_CE_M") + _CE_CONDA = shell.get_env_var("_CE_CONDA") shell.sendline("conda --version") shell.p.expect_exact("conda " + conda_version) - shell.sendline('conda' + activate + 'base') + shell.sendline("conda" + activate + "base") shell.sendline("type conda") shell.expect(conda_is_a_function) - CONDA_EXE2 = shell.get_env_var('CONDA_EXE') - _CE_M2 = shell.get_env_var('_CE_M') + CONDA_EXE2 = shell.get_env_var("CONDA_EXE") + _CE_M2 = shell.get_env_var("_CE_M") - shell.assert_env_var('PS1', '(base).*') - shell.assert_env_var('CONDA_SHLVL', '1') - PATH1 = shell.get_env_var('PATH', '') - assert len(PATH0.split(':')) + num_paths_added == len(PATH1.split(':')) + shell.assert_env_var("PS1", "(base).*") + shell.assert_env_var("CONDA_SHLVL", "1") + PATH1 = shell.get_env_var("PATH", "") + assert len(PATH0.split(":")) + num_paths_added == len(PATH1.split(":")) - CONDA_EXE = shell.get_env_var('CONDA_EXE') - _CE_M = shell.get_env_var('_CE_M') - _CE_CONDA = shell.get_env_var('_CE_CONDA') + CONDA_EXE = shell.get_env_var("CONDA_EXE") + _CE_M = shell.get_env_var("_CE_M") + _CE_CONDA = shell.get_env_var("_CE_CONDA") log.debug("activating ..") - shell.sendline('conda' + activate + '"%s"' % prefix_p) + shell.sendline("conda" + activate + '"%s"' % prefix_p) shell.sendline("type conda") shell.expect(conda_is_a_function) - CONDA_EXE2 = shell.get_env_var('CONDA_EXE') - _CE_M2 = shell.get_env_var('_CE_M') - _CE_CONDA2 = shell.get_env_var('_CE_CONDA') - assert CONDA_EXE == CONDA_EXE2, "CONDA_EXE changed by activation procedure\n:From\n{}\nto:\n{}".\ format(CONDA_EXE, CONDA_EXE2) - assert _CE_M2 == _CE_M2, "_CE_M changed by activation procedure\n:From\n{}\nto:\n{}".\ format(_CE_M, _CE_M2) - assert _CE_CONDA == _CE_CONDA2, "_CE_CONDA changed by activation procedure\n:From\n{}\nto:\n{}".\ format(_CE_CONDA, _CE_CONDA2) - - shell.sendline('env | sort') + CONDA_EXE2 = shell.get_env_var("CONDA_EXE") + _CE_M2 = shell.get_env_var("_CE_M") + _CE_CONDA2 = shell.get_env_var("_CE_CONDA") + assert ( + CONDA_EXE == CONDA_EXE2 + ), "CONDA_EXE changed by activation procedure\n:From\n{}\nto:\n{}".format( + CONDA_EXE, CONDA_EXE2 + ) + assert ( + _CE_M == _CE_M2 + ), "_CE_M changed by activation procedure\n:From\n{}\nto:\n{}".format( + _CE_M, _CE_M2 + ) + assert ( + _CE_CONDA == _CE_CONDA2 + ), "_CE_CONDA changed by activation procedure\n:From\n{}\nto:\n{}".format( + _CE_CONDA, _CE_CONDA2 + ) + + shell.sendline("env | sort") # When CONDA_SHLVL==2 fails it usually means that conda activate failed. When that fails it is # usually because you forgot to pass `--dev` to the *previous* activate so CONDA_EXE changed # from python to conda, which is then found on PATH instead of using the dev sources. When it # goes to use this old conda to generate the activation script for the newly activated env, # it is running the old code (or at best, a mix of new code and old scripts).
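# An illustrative restatement, not part of this diff, of the invariant asserted
# throughout basic_posix: each activation grows PATH by exactly num_paths_added
# entries, and a full deactivation restores the original count. All values here
# are hypothetical.
PATH0 = "/usr/bin:/bin"              # pre-activation PATH
num_paths_added = 1                  # e.g. just <prefix>/bin on Linux
PATH1 = "/tmp/prefix/bin:" + PATH0   # post-activation PATH
assert len(PATH0.split(":")) + num_paths_added == len(PATH1.split(":"))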
- shell.assert_env_var('CONDA_SHLVL', '2') - CONDA_PREFIX = shell.get_env_var('CONDA_PREFIX', '') + shell.assert_env_var("CONDA_SHLVL", "2") + CONDA_PREFIX = shell.get_env_var("CONDA_PREFIX", "") # We get C: vs c: differences on Windows. # Also, self.prefix instead of prefix_p is deliberate (maybe unfortunate?) assert CONDA_PREFIX.lower() == self.prefix.lower() - PATH2 = shell.get_env_var('PATH', '') - assert len(PATH0.split(':')) + num_paths_added == len(PATH2.split(':')) - - shell.sendline('env | sort | grep CONDA') - shell.expect('CONDA_') - shell.sendline("echo \"PATH=$PATH\"") - shell.expect('PATH=') - shell.sendline('conda' + activate + '"%s"' % prefix2_p) - shell.sendline('env | sort | grep CONDA') - shell.expect('CONDA_') - shell.sendline("echo \"PATH=$PATH\"") - shell.expect('PATH=') - shell.assert_env_var('PS1', '(charizard).*') - shell.assert_env_var('CONDA_SHLVL', '3') - PATH3 = shell.get_env_var('PATH') - assert len(PATH0.split(':')) + num_paths_added == len(PATH3.split(':')) - - CONDA_EXE2 = shell.get_env_var('CONDA_EXE') - _CE_M2 = shell.get_env_var('_CE_M') - _CE_CONDA2 = shell.get_env_var('_CE_CONDA') - assert CONDA_EXE == CONDA_EXE2, "CONDA_EXE changed by stacked activation procedure\n:From\n{}\nto:\n{}".\ - format(CONDA_EXE, CONDA_EXE2) - assert _CE_M2 == _CE_M2, "_CE_M changed by stacked activation procedure\n:From\n{}\nto:\n{}".\ - format(_CE_M, _CE_M2) - assert _CE_CONDA == _CE_CONDA2, "_CE_CONDA stacked changed by activation procedure\n:From\n{}\nto:\n{}".\ - format(_CE_CONDA, _CE_CONDA2) - - shell.sendline('conda' + install + f'-yq hdf5={HDF5_VERSION}') - shell.expect('Executing transaction: ...working... done.*\n', timeout=60) - shell.assert_env_var('?', '0', use_exact=True) - - shell.sendline('h5stat --version') - shell.expect(fr'.*h5stat: Version {HDF5_VERSION}.*') + PATH2 = shell.get_env_var("PATH", "") + assert len(PATH0.split(":")) + num_paths_added == len(PATH2.split(":")) + + shell.sendline("env | sort | grep CONDA") + shell.expect("CONDA_") + shell.sendline('echo "PATH=$PATH"') + shell.expect("PATH=") + shell.sendline("conda" + activate + '"%s"' % prefix2_p) + shell.sendline("env | sort | grep CONDA") + shell.expect("CONDA_") + shell.sendline('echo "PATH=$PATH"') + shell.expect("PATH=") + shell.assert_env_var("PS1", "(charizard).*") + shell.assert_env_var("CONDA_SHLVL", "3") + PATH3 = shell.get_env_var("PATH") + assert len(PATH0.split(":")) + num_paths_added == len(PATH3.split(":")) + + CONDA_EXE2 = shell.get_env_var("CONDA_EXE") + _CE_M2 = shell.get_env_var("_CE_M") + _CE_CONDA2 = shell.get_env_var("_CE_CONDA") + assert ( + CONDA_EXE == CONDA_EXE2 + ), "CONDA_EXE changed by stacked activation procedure\n:From\n{}\nto:\n{}".format( + CONDA_EXE, CONDA_EXE2 + ) + assert ( + _CE_M2 == _CE_M2 + ), "_CE_M changed by stacked activation procedure\n:From\n{}\nto:\n{}".format( + _CE_M, _CE_M2 + ) + assert ( + _CE_CONDA == _CE_CONDA2 + ), "_CE_CONDA stacked changed by activation procedure\n:From\n{}\nto:\n{}".format( + _CE_CONDA, _CE_CONDA2 + ) + + shell.sendline("conda" + install + f"-yq hdf5={HDF5_VERSION}") + shell.expect("Executing transaction: ...working... 
done.*\n", timeout=60) + shell.assert_env_var("?", "0", use_exact=True) + + shell.sendline("h5stat --version") + shell.expect(rf".*h5stat: Version {HDF5_VERSION}.*") # TODO: assert that reactivate worked correctly @@ -2300,283 +2617,292 @@ def basic_posix(self, shell): shell.expect(conda_is_a_function) shell.sendline(f"conda run {dev_arg} h5stat --version") - shell.expect(fr'.*h5stat: Version {HDF5_VERSION}.*') + shell.expect(rf".*h5stat: Version {HDF5_VERSION}.*") # regression test for #6840 - shell.sendline('conda' + install + '--blah') - shell.assert_env_var('?', '2', use_exact=True) - shell.sendline('conda list --blah') - shell.assert_env_var('?', '2', use_exact=True) - - shell.sendline('conda' + deactivate) - shell.assert_env_var('CONDA_SHLVL', '2') - PATH = shell.get_env_var('PATH') - assert len(PATH0.split(':')) + num_paths_added == len(PATH.split(':')) - - shell.sendline('conda' + deactivate) - shell.assert_env_var('CONDA_SHLVL', '1') - PATH = shell.get_env_var('PATH') - assert len(PATH0.split(':')) + num_paths_added == len(PATH.split(':')) - - shell.sendline('conda' + deactivate) - shell.assert_env_var('CONDA_SHLVL', '0') - PATH = shell.get_env_var('PATH') - assert len(PATH0.split(':')) == len(PATH.split(':')) + shell.sendline("conda" + install + "--blah") + shell.assert_env_var("?", "2", use_exact=True) + shell.sendline("conda list --blah") + shell.assert_env_var("?", "2", use_exact=True) + + shell.sendline("conda" + deactivate) + shell.assert_env_var("CONDA_SHLVL", "2") + PATH = shell.get_env_var("PATH") + assert len(PATH0.split(":")) + num_paths_added == len(PATH.split(":")) + + shell.sendline("conda" + deactivate) + shell.assert_env_var("CONDA_SHLVL", "1") + PATH = shell.get_env_var("PATH") + assert len(PATH0.split(":")) + num_paths_added == len(PATH.split(":")) + + shell.sendline("conda" + deactivate) + shell.assert_env_var("CONDA_SHLVL", "0") + PATH = shell.get_env_var("PATH") + assert len(PATH0.split(":")) == len(PATH.split(":")) if on_win: assert PATH0.lower() == PATH.lower() else: assert PATH0 == PATH - shell.sendline(shell.print_env_var % 'PS1') - shell.expect('.*\n') - assert 'CONDA_PROMPT_MODIFIER' not in str(shell.p.after) + shell.sendline(shell.print_env_var % "PS1") + shell.expect(".*\n") + assert "CONDA_PROMPT_MODIFIER" not in str(shell.p.after) - shell.sendline('conda' + deactivate) - shell.assert_env_var('CONDA_SHLVL', '0') + shell.sendline("conda" + deactivate) + shell.assert_env_var("CONDA_SHLVL", "0") # When fully deactivated, CONDA_EXE, _CE_M and _CE_CONDA must be retained # because the conda shell scripts use them and if they are unset activation # is not possible. - CONDA_EXED = shell.get_env_var('CONDA_EXE') - assert CONDA_EXED, "A fully deactivated conda shell must retain CONDA_EXE (and _CE_M and _CE_CONDA in dev)\n" \ - " as the shell scripts refer to them." 
- - PATH0 = shell.get_env_var('PATH') - - shell.sendline('conda' + activate + '"%s"' % prefix2_p) - shell.assert_env_var('CONDA_SHLVL', '1') - PATH1 = shell.get_env_var('PATH') - assert len(PATH0.split(':')) + num_paths_added == len(PATH1.split(':')) - - shell.sendline('conda' + activate + '"%s" --stack' % self.prefix3) - shell.assert_env_var('CONDA_SHLVL', '2') - PATH2 = shell.get_env_var('PATH') - assert 'charizard' in PATH2 - assert 'venusaur' in PATH2 - assert len(PATH0.split(':')) + num_paths_added * 2 == len(PATH2.split(':')) - - shell.sendline('conda' + activate + '"%s"' % prefix_p) - shell.assert_env_var('CONDA_SHLVL', '3') - PATH3 = shell.get_env_var('PATH') - assert 'charizard' in PATH3 - assert 'venusaur' not in PATH3 - assert len(PATH0.split(':')) + num_paths_added * 2 == len(PATH3.split(':')) - - shell.sendline('conda' + deactivate) - shell.assert_env_var('CONDA_SHLVL', '2') - PATH4 = shell.get_env_var('PATH') - assert 'charizard' in PATH4 - assert 'venusaur' in PATH4 + CONDA_EXED = shell.get_env_var("CONDA_EXE") + assert CONDA_EXED, ( + "A fully deactivated conda shell must retain CONDA_EXE (and _CE_M and _CE_CONDA in dev)\n" + " as the shell scripts refer to them." + ) + + PATH0 = shell.get_env_var("PATH") + + shell.sendline("conda" + activate + '"%s"' % prefix2_p) + shell.assert_env_var("CONDA_SHLVL", "1") + PATH1 = shell.get_env_var("PATH") + assert len(PATH0.split(":")) + num_paths_added == len(PATH1.split(":")) + + shell.sendline("conda" + activate + '"%s" --stack' % self.prefix3) + shell.assert_env_var("CONDA_SHLVL", "2") + PATH2 = shell.get_env_var("PATH") + assert "charizard" in PATH2 + assert "venusaur" in PATH2 + assert len(PATH0.split(":")) + num_paths_added * 2 == len(PATH2.split(":")) + + shell.sendline("conda" + activate + '"%s"' % prefix_p) + shell.assert_env_var("CONDA_SHLVL", "3") + PATH3 = shell.get_env_var("PATH") + assert "charizard" in PATH3 + assert "venusaur" not in PATH3 + assert len(PATH0.split(":")) + num_paths_added * 2 == len(PATH3.split(":")) + + shell.sendline("conda" + deactivate) + shell.assert_env_var("CONDA_SHLVL", "2") + PATH4 = shell.get_env_var("PATH") + assert "charizard" in PATH4 + assert "venusaur" in PATH4 if on_win: assert PATH4.lower() == PATH2.lower() else: assert PATH4 == PATH2 - shell.sendline('conda' + deactivate) - shell.assert_env_var('CONDA_SHLVL', '1') - PATH5 = shell.get_env_var('PATH') + shell.sendline("conda" + deactivate) + shell.assert_env_var("CONDA_SHLVL", "1") + PATH5 = shell.get_env_var("PATH") if on_win: assert PATH1.lower() == PATH5.lower() else: assert PATH1 == PATH5 # Test auto_stack - shell.sendline('conda config --env --set auto_stack 1' ) - - shell.sendline('conda' + activate + '"%s"' % self.prefix3) - shell.assert_env_var('CONDA_SHLVL', '2') - PATH2 = shell.get_env_var('PATH') - assert 'charizard' in PATH2 - assert 'venusaur' in PATH2 - assert len(PATH0.split(':')) + num_paths_added * 2 == len(PATH2.split(':')) - - shell.sendline('conda' + activate + '"%s"' % prefix_p) - shell.assert_env_var('CONDA_SHLVL', '3') - PATH3 = shell.get_env_var('PATH') - assert 'charizard' in PATH3 - assert 'venusaur' not in PATH3 - assert len(PATH0.split(':')) + num_paths_added * 2 == len(PATH3.split(':')) + shell.sendline("conda config --env --set auto_stack 1") + + shell.sendline("conda" + activate + '"%s"' % self.prefix3) + shell.assert_env_var("CONDA_SHLVL", "2") + PATH2 = shell.get_env_var("PATH") + assert "charizard" in PATH2 + assert "venusaur" in PATH2 + assert len(PATH0.split(":")) + num_paths_added * 2 == 
len(PATH2.split(":")) + + shell.sendline("conda" + activate + '"%s"' % prefix_p) + shell.assert_env_var("CONDA_SHLVL", "3") + PATH3 = shell.get_env_var("PATH") + assert "charizard" in PATH3 + assert "venusaur" not in PATH3 + assert len(PATH0.split(":")) + num_paths_added * 2 == len(PATH3.split(":")) @pytest.mark.flaky(reruns=5) @pytest.mark.skipif(bash_unsupported(), reason=bash_unsupported_because()) def test_bash_basic_integration(self): - with InteractiveShell('bash') as shell: + with InteractiveShell("bash") as shell: self.basic_posix(shell) - @pytest.mark.skipif(not which('dash') or on_win, reason='dash not installed') + @pytest.mark.skipif(not which("dash") or on_win, reason="dash not installed") def test_dash_basic_integration(self): - with InteractiveShell('dash') as shell: + with InteractiveShell("dash") as shell: self.basic_posix(shell) - @pytest.mark.skipif(not which('zsh'), reason='zsh not installed') + @pytest.mark.skipif(not which("zsh"), reason="zsh not installed") def test_zsh_basic_integration(self): - with InteractiveShell('zsh') as shell: + with InteractiveShell("zsh") as shell: self.basic_posix(shell) def basic_csh(self, shell): shell.sendline("conda --version") shell.p.expect_exact("conda " + conda_version) - shell.assert_env_var('CONDA_SHLVL', '0') - shell.sendline('conda activate base') - shell.assert_env_var('prompt', '(base).*') - shell.assert_env_var('CONDA_SHLVL', '1') + shell.assert_env_var("CONDA_SHLVL", "0") + shell.sendline("conda activate base") + shell.assert_env_var("prompt", "(base).*") + shell.assert_env_var("CONDA_SHLVL", "1") shell.sendline('conda activate "%s"' % self.prefix) - shell.assert_env_var('CONDA_SHLVL', '2') - shell.assert_env_var('CONDA_PREFIX', self.prefix, True) - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '1') - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '0') + shell.assert_env_var("CONDA_SHLVL", "2") + shell.assert_env_var("CONDA_PREFIX", self.prefix, True) + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "1") + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "0") - assert 'CONDA_PROMPT_MODIFIER' not in str(shell.p.after) + assert "CONDA_PROMPT_MODIFIER" not in str(shell.p.after) - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '0') + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "0") - @pytest.mark.skipif(not which('csh'), reason='csh not installed') - @pytest.mark.xfail(reason="pure csh doesn't support argument passing to sourced scripts") + @pytest.mark.skipif(not which("csh"), reason="csh not installed") + @pytest.mark.xfail( + reason="pure csh doesn't support argument passing to sourced scripts" + ) def test_csh_basic_integration(self): - with InteractiveShell('csh') as shell: + with InteractiveShell("csh") as shell: self.basic_csh(shell) - @pytest.mark.skipif(not which('tcsh'), reason='tcsh not installed') + @pytest.mark.skipif(not which("tcsh"), reason="tcsh not installed") @pytest.mark.xfail(reason="punting until we officially enable support for tcsh") def test_tcsh_basic_integration(self): - with InteractiveShell('tcsh') as shell: + with InteractiveShell("tcsh") as shell: self.basic_csh(shell) - @pytest.mark.skipif(not which('fish'), reason='fish not installed') + @pytest.mark.skipif(not which("fish"), reason="fish not installed") @pytest.mark.xfail(reason="fish and pexpect don't seem to work together?") def test_fish_basic_integration(self): - with 
InteractiveShell('fish') as shell: - shell.sendline('env | sort') + with InteractiveShell("fish") as shell: + shell.sendline("env | sort") # We should be seeing environment variable output to terminal with this line, but # we aren't. Haven't experienced this problem yet with any other shell... - shell.assert_env_var('CONDA_SHLVL', '0') - shell.sendline('conda activate base') - shell.assert_env_var('CONDA_SHLVL', '1') + shell.assert_env_var("CONDA_SHLVL", "0") + shell.sendline("conda activate base") + shell.assert_env_var("CONDA_SHLVL", "1") shell.sendline('conda activate "%s"' % self.prefix) - shell.assert_env_var('CONDA_SHLVL', '2') - shell.assert_env_var('CONDA_PREFIX', self.prefix, True) - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '1') - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '0') + shell.assert_env_var("CONDA_SHLVL", "2") + shell.assert_env_var("CONDA_PREFIX", self.prefix, True) + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "1") + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "0") - shell.sendline(shell.print_env_var % 'PS1') - shell.expect('.*\n') - assert 'CONDA_PROMPT_MODIFIER' not in str(shell.p.after) + shell.sendline(shell.print_env_var % "PS1") + shell.expect(".*\n") + assert "CONDA_PROMPT_MODIFIER" not in str(shell.p.after) - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '0') + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "0") - @pytest.mark.skipif(not which_powershell(), reason='PowerShell not installed') + @pytest.mark.skipif(not which_powershell(), reason="PowerShell not installed") def test_powershell_basic_integration(self): - charizard = join(self.prefix, 'envs', 'charizard') - venusaur = join(self.prefix, 'envs', 'venusaur') + charizard = join(self.prefix, "envs", "charizard") + venusaur = join(self.prefix, "envs", "venusaur") posh_kind, posh_path = which_powershell() print(f"## [PowerShell integration] Using {posh_path}.") with InteractiveShell(posh_kind) as shell: - print('## [PowerShell integration] Starting test.') - shell.sendline('(Get-Command conda).CommandType') - shell.p.expect_exact('Alias') - shell.sendline('(Get-Command conda).Definition') - shell.p.expect_exact('Invoke-Conda') - shell.sendline('(Get-Command Invoke-Conda).Definition') - - print('## [PowerShell integration] Activating.') + print("## [PowerShell integration] Starting test.") + shell.sendline("(Get-Command conda).CommandType") + shell.p.expect_exact("Alias") + shell.sendline("(Get-Command conda).Definition") + shell.p.expect_exact("Invoke-Conda") + shell.sendline("(Get-Command Invoke-Conda).Definition") + + print("## [PowerShell integration] Activating.") shell.sendline('conda activate "%s"' % charizard) - shell.assert_env_var('CONDA_SHLVL', '1\r?') - PATH = shell.get_env_var('PATH') - assert 'charizard' in PATH + shell.assert_env_var("CONDA_SHLVL", "1\r?") + PATH = shell.get_env_var("PATH") + assert "charizard" in PATH shell.sendline("conda --version") shell.p.expect_exact("conda " + conda_version) shell.sendline('conda activate "%s"' % self.prefix) - shell.assert_env_var('CONDA_SHLVL', '2\r?') - shell.assert_env_var('CONDA_PREFIX', self.prefix, True) + shell.assert_env_var("CONDA_SHLVL", "2\r?") + shell.assert_env_var("CONDA_PREFIX", self.prefix, True) - shell.sendline('conda deactivate') - PATH = shell.get_env_var('PATH') - assert 'charizard' in PATH + shell.sendline("conda deactivate") + PATH = 
shell.get_env_var("PATH") + assert "charizard" in PATH shell.sendline('conda activate -stack "%s"' % venusaur) - PATH = shell.get_env_var('PATH') - assert 'venusaur' in PATH - assert 'charizard' in PATH - - print('## [PowerShell integration] Installing.') - shell.sendline(f'conda install -yq hdf5={HDF5_VERSION}') - shell.expect('Executing transaction: ...working... done.*\n', timeout=100) - shell.sendline('$LASTEXITCODE') - shell.expect('0') + PATH = shell.get_env_var("PATH") + assert "venusaur" in PATH + assert "charizard" in PATH + + print("## [PowerShell integration] Installing.") + shell.sendline(f"conda install -yq hdf5={HDF5_VERSION}") + shell.expect("Executing transaction: ...working... done.*\n", timeout=100) + shell.sendline("$LASTEXITCODE") + shell.expect("0") # TODO: assert that reactivate worked correctly - print('## [PowerShell integration] Checking installed version.') - shell.sendline('h5stat --version') - shell.expect(fr'.*h5stat: Version {HDF5_VERSION}.*') + print("## [PowerShell integration] Checking installed version.") + shell.sendline("h5stat --version") + shell.expect(rf".*h5stat: Version {HDF5_VERSION}.*") # conda run integration test print("## [PowerShell integration] Checking conda run.") shell.sendline(f"conda run {dev_arg} h5stat --version") - shell.expect(fr'.*h5stat: Version {HDF5_VERSION}.*') - - print('## [PowerShell integration] Deactivating') - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '1\r?') - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '0\r?') - shell.sendline('conda deactivate') - shell.assert_env_var('CONDA_SHLVL', '0\r?') - - - @pytest.mark.skipif(not which_powershell() or not on_win, - reason="Windows, PowerShell specific test") + shell.expect(rf".*h5stat: Version {HDF5_VERSION}.*") + + print("## [PowerShell integration] Deactivating") + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "1\r?") + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "0\r?") + shell.sendline("conda deactivate") + shell.assert_env_var("CONDA_SHLVL", "0\r?") + + @pytest.mark.skipif( + not which_powershell() or not on_win, reason="Windows, PowerShell specific test" + ) def test_powershell_PATH_management(self): posh_kind, posh_path = which_powershell() print(f"## [PowerShell activation PATH management] Using {posh_path}.") with InteractiveShell(posh_kind) as shell: - prefix = join(self.prefix, 'envs', 'test') - print('## [PowerShell activation PATH management] Starting test.') - shell.sendline('(Get-Command conda).CommandType') - shell.p.expect_exact('Alias') - shell.sendline('(Get-Command conda).Definition') - shell.p.expect_exact('Invoke-Conda') - shell.sendline('(Get-Command Invoke-Conda).Definition') - shell.p.expect('.*\n') - - shell.sendline('conda deactivate') - shell.sendline('conda deactivate') - - PATH0 = shell.get_env_var('PATH', '') + prefix = join(self.prefix, "envs", "test") + print("## [PowerShell activation PATH management] Starting test.") + shell.sendline("(Get-Command conda).CommandType") + shell.p.expect_exact("Alias") + shell.sendline("(Get-Command conda).Definition") + shell.p.expect_exact("Invoke-Conda") + shell.sendline("(Get-Command Invoke-Conda).Definition") + shell.p.expect(".*\n") + + shell.sendline("conda deactivate") + shell.sendline("conda deactivate") + + PATH0 = shell.get_env_var("PATH", "") print(f"PATH is {PATH0.split(os.pathsep)}") - shell.sendline('(Get-Command conda).CommandType') - shell.p.expect_exact('Alias') + 
shell.sendline("(Get-Command conda).CommandType") + shell.p.expect_exact("Alias") shell.sendline(f'conda create -yqp "{prefix}" bzip2') - shell.expect('Executing transaction: ...working... done.*\n') + shell.expect("Executing transaction: ...working... done.*\n") - - @pytest.mark.skipif(not which('cmd.exe'), reason='cmd.exe not installed') + @pytest.mark.skipif(not which("cmd.exe"), reason="cmd.exe not installed") def test_cmd_exe_basic_integration(self): - charizard = join(self.prefix, 'envs', 'charizard') - conda_bat = join(CONDA_PACKAGE_ROOT, 'shell', 'condabin', 'conda.bat') - with env_vars({'PATH': "C:\\Windows\\system32;C:\\Windows;C:\\Windows\\System32\\Wbem;C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\"}, - stack_callback=conda_tests_ctxt_mgmt_def_pol): - with InteractiveShell('cmd.exe') as shell: - shell.expect('.*\n') + charizard = join(self.prefix, "envs", "charizard") + conda_bat = join(CONDA_PACKAGE_ROOT, "shell", "condabin", "conda.bat") + with env_vars( + { + "PATH": "C:\\Windows\\system32;C:\\Windows;C:\\Windows\\System32\\Wbem;C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\" + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with InteractiveShell("cmd.exe") as shell: + shell.expect(".*\n") - shell.assert_env_var('_CE_CONDA', 'conda\r') - shell.assert_env_var('_CE_M', '-m\r') - shell.assert_env_var('CONDA_EXE', escape(sys.executable) + '\r') + shell.assert_env_var("_CE_CONDA", "conda\r") + shell.assert_env_var("_CE_M", "-m\r") + shell.assert_env_var("CONDA_EXE", escape(sys.executable) + "\r") # We use 'PowerShell' here because 'where conda' returns all of them and # p.expect_exact does not do what you would think it does given its name. - shell.sendline('powershell -NoProfile -c ("get-command conda | Format-List Source")') - shell.p.expect_exact('Source : ' + conda_bat) + shell.sendline( + 'powershell -NoProfile -c ("get-command conda | Format-List Source")' + ) + shell.p.expect_exact("Source : " + conda_bat) shell.sendline("chcp") shell.expect(".*\n") - PATH0 = shell.get_env_var('PATH', '').split(os.pathsep) + PATH0 = shell.get_env_var("PATH", "").split(os.pathsep) print(PATH0) shell.sendline('conda activate --dev "%s"' % charizard) @@ -2584,27 +2910,31 @@ def test_cmd_exe_basic_integration(self): shell.expect(".*\n") shell.assert_env_var("CONDA_SHLVL", "1\r") - PATH1 = shell.get_env_var('PATH', '').split(os.pathsep) + PATH1 = shell.get_env_var("PATH", "").split(os.pathsep) print(PATH1) - shell.sendline('powershell -NoProfile -c ("get-command conda | Format-List Source")') - shell.p.expect_exact('Source : ' + conda_bat) - - shell.assert_env_var('_CE_CONDA', 'conda\r') - shell.assert_env_var('_CE_M', '-m\r') - shell.assert_env_var('CONDA_EXE', escape(sys.executable) + '\r') - shell.assert_env_var('CONDA_PREFIX', charizard, True) - PATH2 = shell.get_env_var('PATH', '').split(os.pathsep) + shell.sendline( + 'powershell -NoProfile -c ("get-command conda | Format-List Source")' + ) + shell.p.expect_exact("Source : " + conda_bat) + + shell.assert_env_var("_CE_CONDA", "conda\r") + shell.assert_env_var("_CE_M", "-m\r") + shell.assert_env_var("CONDA_EXE", escape(sys.executable) + "\r") + shell.assert_env_var("CONDA_PREFIX", charizard, True) + PATH2 = shell.get_env_var("PATH", "").split(os.pathsep) print(PATH2) - shell.sendline('powershell -NoProfile -c ("get-command conda -All | Format-List Source")') - shell.p.expect_exact('Source : ' + conda_bat) + shell.sendline( + 'powershell -NoProfile -c ("get-command conda -All | Format-List Source")' + ) + 
shell.p.expect_exact("Source : " + conda_bat) shell.sendline('conda activate --dev "%s"' % self.prefix) - shell.assert_env_var('_CE_CONDA', 'conda\r') - shell.assert_env_var('_CE_M', '-m\r') - shell.assert_env_var('CONDA_EXE', escape(sys.executable) + '\r') - shell.assert_env_var('CONDA_SHLVL', '2\r') - shell.assert_env_var('CONDA_PREFIX', self.prefix, True) + shell.assert_env_var("_CE_CONDA", "conda\r") + shell.assert_env_var("_CE_M", "-m\r") + shell.assert_env_var("CONDA_EXE", escape(sys.executable) + "\r") + shell.assert_env_var("CONDA_SHLVL", "2\r") + shell.assert_env_var("CONDA_PREFIX", self.prefix, True) # TODO: Make a dummy package and release it (somewhere?) # should be a relatively light package, but also @@ -2613,75 +2943,82 @@ def test_cmd_exe_basic_integration(self): # not require an old or incompatible version of any # library critical to the correct functioning of # Python (e.g. OpenSSL). - shell.sendline(f'conda install -yq hdf5={HDF5_VERSION}') - shell.expect('Executing transaction: ...working... done.*\n', timeout=100) - shell.assert_env_var('errorlevel', '0', True) + shell.sendline(f"conda install -yq hdf5={HDF5_VERSION}") + shell.expect( + "Executing transaction: ...working... done.*\n", timeout=100 + ) + shell.assert_env_var("errorlevel", "0", True) # TODO: assert that reactivate worked correctly - shell.sendline('h5stat --version') - shell.expect(fr'.*h5stat: Version {HDF5_VERSION}.*') + shell.sendline("h5stat --version") + shell.expect(rf".*h5stat: Version {HDF5_VERSION}.*") # conda run integration test shell.sendline(f"conda run {dev_arg} h5stat --version") - shell.expect(fr'.*h5stat: Version {HDF5_VERSION}.*') + shell.expect(rf".*h5stat: Version {HDF5_VERSION}.*") - shell.sendline('conda deactivate --dev') - shell.assert_env_var('CONDA_SHLVL', '1\r') - shell.sendline('conda deactivate --dev') - shell.assert_env_var('CONDA_SHLVL', '0\r') - shell.sendline('conda deactivate --dev') - shell.assert_env_var('CONDA_SHLVL', '0\r') + shell.sendline("conda deactivate --dev") + shell.assert_env_var("CONDA_SHLVL", "1\r") + shell.sendline("conda deactivate --dev") + shell.assert_env_var("CONDA_SHLVL", "0\r") + shell.sendline("conda deactivate --dev") + shell.assert_env_var("CONDA_SHLVL", "0\r") @pytest.mark.skipif(bash_unsupported(), reason=bash_unsupported_because()) def test_bash_activate_error(self): context.dev = True - with InteractiveShell('bash') as shell: + with InteractiveShell("bash") as shell: shell.sendline("export CONDA_SHLVL=unaffected") if on_win: shell.sendline("uname -o") - shell.expect('(Msys|Cygwin)') + shell.expect("(Msys|Cygwin)") shell.sendline("conda activate environment-not-found-doesnt-exist") - shell.expect('Could not find conda environment: environment-not-found-doesnt-exist') - shell.assert_env_var('CONDA_SHLVL', 'unaffected') + shell.expect( + "Could not find conda environment: environment-not-found-doesnt-exist" + ) + shell.assert_env_var("CONDA_SHLVL", "unaffected") shell.sendline("conda activate -h blah blah") - shell.expect('usage: conda activate') + shell.expect("usage: conda activate") - @pytest.mark.skipif(not which('cmd.exe'), reason='cmd.exe not installed') + @pytest.mark.skipif(not which("cmd.exe"), reason="cmd.exe not installed") def test_cmd_exe_activate_error(self): context.dev = True - with InteractiveShell('cmd.exe') as shell: + with InteractiveShell("cmd.exe") as shell: shell.sendline("set") - shell.expect('.*') + shell.expect(".*") shell.sendline("conda activate --dev environment-not-found-doesnt-exist") - shell.expect('Could not 
find conda environment: environment-not-found-doesnt-exist') - shell.expect('.*') - shell.assert_env_var('errorlevel', '1\r') + shell.expect( + "Could not find conda environment: environment-not-found-doesnt-exist" + ) + shell.expect(".*") + shell.assert_env_var("errorlevel", "1\r") shell.sendline("conda activate -h blah blah") - shell.expect('usage: conda activate') + shell.expect("usage: conda activate") @pytest.mark.flaky(reruns=5) @pytest.mark.skipif(bash_unsupported(), reason=bash_unsupported_because()) def test_legacy_activate_deactivate_bash(self): - with InteractiveShell('bash') as shell: - + with InteractiveShell("bash") as shell: # calling bash -l, as we do for MSYS2, may cause conda activation. - shell.sendline('conda deactivate') - shell.sendline('conda deactivate') - shell.sendline('conda deactivate') - shell.sendline('conda deactivate') - shell.expect('.*\n') + shell.sendline("conda deactivate") + shell.sendline("conda deactivate") + shell.sendline("conda deactivate") + shell.sendline("conda deactivate") + shell.expect(".*\n") activator = PosixActivator() CONDA_PACKAGE_ROOT_p = activator.path_conversion(CONDA_PACKAGE_ROOT) prefix2_p = activator.path_conversion(self.prefix2) prefix3_p = activator.path_conversion(self.prefix3) shell.sendline("export _CONDA_ROOT='%s/shell'" % CONDA_PACKAGE_ROOT_p) - shell.sendline(f'source "${{_CONDA_ROOT}}/bin/activate" {dev_arg} "{prefix2_p}"') + shell.sendline( + f'source "${{_CONDA_ROOT}}/bin/activate" {dev_arg} "{prefix2_p}"' + ) PATH0 = shell.get_env_var("PATH") - assert 'charizard' in PATH0 + assert "charizard" in PATH0 shell.sendline("type conda") shell.expect("conda is a function") @@ -2689,56 +3026,59 @@ def test_legacy_activate_deactivate_bash(self): shell.sendline("conda --version") shell.p.expect_exact("conda " + conda_version) - shell.sendline(f'source "${{_CONDA_ROOT}}/bin/activate" {dev_arg} "{prefix3_p}"') + shell.sendline( + f'source "${{_CONDA_ROOT}}/bin/activate" {dev_arg} "{prefix3_p}"' + ) PATH1 = shell.get_env_var("PATH") - assert 'venusaur' in PATH1 + assert "venusaur" in PATH1 shell.sendline('source "${_CONDA_ROOT}/bin/deactivate"') PATH2 = shell.get_env_var("PATH") - assert 'charizard' in PATH2 + assert "charizard" in PATH2 shell.sendline('source "${_CONDA_ROOT}/bin/deactivate"') - shell.assert_env_var('CONDA_SHLVL', '0') + shell.assert_env_var("CONDA_SHLVL", "0") - @pytest.mark.skipif(not which('cmd.exe'), reason='cmd.exe not installed') + @pytest.mark.skipif(not which("cmd.exe"), reason="cmd.exe not installed") def test_legacy_activate_deactivate_cmd_exe(self): - with InteractiveShell('cmd.exe') as shell: + with InteractiveShell("cmd.exe") as shell: shell.sendline("echo off") - conda__ce_conda = shell.get_env_var('_CE_CONDA') - assert conda__ce_conda == 'conda' + conda__ce_conda = shell.get_env_var("_CE_CONDA") + assert conda__ce_conda == "conda" PATH = "%s\\shell\\Scripts;%%PATH%%" % CONDA_PACKAGE_ROOT shell.sendline("SET PATH=" + PATH) shell.sendline('activate --dev "%s"' % self.prefix2) - shell.expect('.*\n') + shell.expect(".*\n") - conda_shlvl = shell.get_env_var('CONDA_SHLVL') - assert conda_shlvl == '1', conda_shlvl + conda_shlvl = shell.get_env_var("CONDA_SHLVL") + assert conda_shlvl == "1", conda_shlvl PATH = shell.get_env_var("PATH") - assert 'charizard' in PATH + assert "charizard" in PATH - conda__ce_conda = shell.get_env_var('_CE_CONDA') - assert conda__ce_conda == 'conda' + conda__ce_conda = shell.get_env_var("_CE_CONDA") + assert conda__ce_conda == "conda" shell.sendline("conda --version") 
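# A sketch, not part of this diff, of how the cmd.exe branch of get_env_var
# (shown earlier in this file) recovers a value from pexpect output, which the
# CONDA_SHLVL and _CE_CONDA round-trips in these cmd.exe tests rely on: the
# console echoes the "@echo %VAR%" command itself before printing the value,
# so the regex skips the echoed line and captures up to the next carriage
# return. parse_cmd_echo is a hypothetical helper, not conda API.
import re

def parse_cmd_echo(buffer, var):
    match = re.search("@echo %%%s%%\r\n([^\r]*)\r" % var, buffer)
    return match.group(1) if match else None

assert parse_cmd_echo("@echo %CONDA_SHLVL%\r\n1\r\n", "CONDA_SHLVL") == "1"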
shell.p.expect_exact("conda " + conda_version) shell.sendline('activate.bat --dev "%s"' % self.prefix3) PATH = shell.get_env_var("PATH") - assert 'venusaur' in PATH + assert "venusaur" in PATH shell.sendline("deactivate.bat --dev") PATH = shell.get_env_var("PATH") - assert 'charizard' in PATH + assert "charizard" in PATH shell.sendline("deactivate --dev") - conda_shlvl = shell.get_env_var('CONDA_SHLVL') - assert conda_shlvl == '0', conda_shlvl + conda_shlvl = shell.get_env_var("CONDA_SHLVL") + assert conda_shlvl == "0", conda_shlvl + @pytest.fixture(scope="module") def prefix(): @@ -2758,17 +3098,22 @@ def prefix(): rm_rf(root) + @pytest.mark.integration @pytest.mark.parametrize( ["shell"], [ pytest.param( "bash", - marks=pytest.mark.skipif(bash_unsupported(), reason=bash_unsupported_because()), + marks=pytest.mark.skipif( + bash_unsupported(), reason=bash_unsupported_because() + ), ), pytest.param( "cmd.exe", - marks=pytest.mark.skipif(not which("cmd.exe"), reason="cmd.exe not installed"), + marks=pytest.mark.skipif( + not which("cmd.exe"), reason="cmd.exe not installed" + ), ), ], ) diff --git a/tests/test_api.py b/tests/test_api.py index 8523f2e66da..dd6a7b96419 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -1,7 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - try: from inspect import getfullargspec as getargspec except ImportError: @@ -9,8 +7,14 @@ import pytest -from conda.api import DepsModifier, PackageCacheData, PrefixData, Solver, SubdirData, \ - UpdateModifier +from conda.api import ( + DepsModifier, + PackageCacheData, + PrefixData, + Solver, + SubdirData, + UpdateModifier, +) from conda.base.context import context from conda.common.compat import isiterable from conda.common.constants import NULL @@ -30,7 +34,9 @@ def inspect_arguments(f, arguments): defaults = result[3] or () default_val_first_idx = len(arg_names) - len(defaults) arg_values = [PositionalArgument] * default_val_first_idx + list(defaults) - for (recorded_name, recorded_value), (arg_name, arg_value) in zip(arguments.items(), zip(arg_names, arg_values)): + for (recorded_name, recorded_value), (arg_name, arg_value) in zip( + arguments.items(), zip(arg_names, arg_values) + ): print(recorded_name, arg_name) assert recorded_name == arg_name assert recorded_value == arg_value @@ -96,7 +102,7 @@ def test_Solver_inputs_contract(): @pytest.mark.integration def test_Solver_return_value_contract(): - solver = Solver('/', (Channel('pkgs/main'),), specs_to_add=('openssl',)) + solver = Solver("/", (Channel("pkgs/main"),), specs_to_add=("openssl",)) solve_final_state_rv = solver.solve_final_state() assert isiterable(solve_final_state_rv) assert all(isinstance(pref, PackageRecord) for pref in solve_final_state_rv) @@ -142,12 +148,12 @@ def test_SubdirData_contract(): @pytest.mark.integration def test_SubdirData_return_value_contract(): - sd = SubdirData(Channel('pkgs/main/linux-64')) - query_result = sd.query('openssl') + sd = SubdirData(Channel("pkgs/main/linux-64")) + query_result = sd.query("openssl") assert isinstance(query_result, tuple) assert all(isinstance(prec, PackageRecord) for prec in query_result) - query_all_result = sd.query_all('openssl', (Channel('pkgs/main'),), context.subdirs) + query_all_result = sd.query_all("openssl", (Channel("pkgs/main"),), context.subdirs) assert isinstance(query_all_result, tuple) assert all(isinstance(prec, PackageRecord) for prec in query_all_result) @@ -205,11 +211,11 @@ def test_PackageCacheData_return_value_contract(): get_result = 
pc.get(PackageRecord.from_objects(single_pcrec)) assert isinstance(get_result, PackageCacheRecord) - query_result = pc.query('openssl') + query_result = pc.query("openssl") assert isinstance(query_result, tuple) assert all(isinstance(pcrec, PackageCacheRecord) for pcrec in query_result) - query_all_result = PackageCacheData.query_all('openssl') + query_all_result = PackageCacheData.query_all("openssl") assert isinstance(query_all_result, tuple) assert all(isinstance(pcrec, PackageCacheRecord) for pcrec in query_all_result) @@ -263,13 +269,15 @@ def test_PrefixData_return_value_contract(): get_result = pd.get(PackageRecord.from_objects(single_prefix_rec)) assert isinstance(get_result, PrefixRecord) - query_result = pd.query('openssl') + query_result = pd.query("openssl") assert isinstance(query_result, tuple) assert all(isinstance(prefix_rec, PrefixRecord) for prefix_rec in query_result) iter_records_result = pd.iter_records() assert isiterable(iter_records_result) - assert all(isinstance(prefix_rec, PrefixRecord) for prefix_rec in iter_records_result) + assert all( + isinstance(prefix_rec, PrefixRecord) for prefix_rec in iter_records_result + ) is_writable_result = pd.is_writable assert is_writable_result is True or is_writable_result is False diff --git a/tests/test_cli.py b/tests/test_cli.py index 03dfa073bf5..1060a934ac0 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,12 +1,11 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import json -import unittest -import uuid import os import re import stat +import unittest +import uuid from unittest.mock import patch import pytest @@ -15,7 +14,12 @@ from conda.base.context import context from conda.gateways.disk.delete import rm_rf from conda.testing.helpers import capture_json_with_argv, run_inprocess_conda_command -from conda.testing.integration import Commands, run_command, make_temp_env, make_temp_prefix +from conda.testing.integration import ( + Commands, + make_temp_env, + make_temp_prefix, + run_command, +) @pytest.mark.usefixtures("tmpdir") @@ -61,7 +65,9 @@ def test_config(self): res = capture_json_with_argv("conda config --get channels --system --json") self.assertJsonSuccess(res) - res = capture_json_with_argv("conda config --get channels --file tempfile.rc --json") + res = capture_json_with_argv( + "conda config --get channels --file tempfile.rc --json" + ) self.assertJsonSuccess(res) res = capture_json_with_argv( @@ -73,7 +79,10 @@ def test_config(self): self.assertJsonSuccess(res) @pytest.mark.integration - @patch("conda.core.envs_manager.get_user_environments_txt_file", return_value=os.devnull) + @patch( + "conda.core.envs_manager.get_user_environments_txt_file", + return_value=os.devnull, + ) def test_info(self, _mocked_guetf): res = capture_json_with_argv("conda info --json") keys = ( @@ -109,20 +118,30 @@ def test_list(self, mockable_context_envs_dirs): self.assertIsInstance(res, list) res = capture_json_with_argv("conda list -r --json") - self.assertTrue(isinstance(res, list) or (isinstance(res, dict) and "error" in res)) + self.assertTrue( + isinstance(res, list) or (isinstance(res, dict) and "error" in res) + ) res = capture_json_with_argv("conda list ipython --json") self.assertIsInstance(res, list) - stdout, stderr, rc = run_inprocess_conda_command("conda list --name nonexistent --json") - assert json.loads(stdout.strip())["exception_name"] == "EnvironmentLocationNotFound" + stdout, stderr, rc = run_inprocess_conda_command( + "conda list --name nonexistent --json" + ) + assert ( 
+ json.loads(stdout.strip())["exception_name"] + == "EnvironmentLocationNotFound" + ) assert stderr == "" assert rc > 0 stdout, stderr, rc = run_inprocess_conda_command( "conda list --name nonexistent --revisions --json" ) - assert json.loads(stdout.strip())["exception_name"] == "EnvironmentLocationNotFound" + assert ( + json.loads(stdout.strip())["exception_name"] + == "EnvironmentLocationNotFound" + ) assert stderr == "" assert rc > 0 @@ -135,7 +154,10 @@ def test_compare(self, mockable_context_envs_dirs): stdout, stderr, rc = run_inprocess_conda_command( "conda compare --name nonexistent tempfile.rc --json" ) - assert json.loads(stdout.strip())["exception_name"] == "EnvironmentLocationNotFound" + assert ( + json.loads(stdout.strip())["exception_name"] + == "EnvironmentLocationNotFound" + ) assert stderr == "" assert rc > 0 assert mockable_context_envs_dirs.call_count > 0 @@ -145,7 +167,9 @@ def test_search_0(self): # searching for everything is quite slow; search without name, few # matching packages. py_3 is not a special build tag, but there are just # a few of them in defaults. - stdout, stderr, rc = run_inprocess_conda_command("conda search *[build=py_3] --json --override-channels -c defaults") + stdout, stderr, rc = run_inprocess_conda_command( + "conda search *[build=py_3] --json --override-channels -c defaults" + ) assert stderr == "" assert rc is None @@ -164,7 +188,12 @@ def test_search_0(self): @pytest.mark.integration def test_search_1(self): - self.assertIsInstance(capture_json_with_argv("conda search ipython --json --override-channels -c defaults"), dict) + self.assertIsInstance( + capture_json_with_argv( + "conda search ipython --json --override-channels -c defaults" + ), + dict, + ) @pytest.mark.integration def test_search_2(self): @@ -190,8 +219,13 @@ def test_search_2(self): # exact match not found, search wildcards stdout, _, _ = run_command( - Commands.SEARCH, prefix, "ython", - "--override-channels", "-c", "defaults", use_exception_handler=True + Commands.SEARCH, + prefix, + "ython", + "--override-channels", + "-c", + "defaults", + use_exception_handler=True, ) assert re.search( @@ -228,13 +262,19 @@ def test_search_3(self): @pytest.mark.integration def test_search_4(self): self.assertIsInstance( - capture_json_with_argv("conda search --json --override-channels -c defaults --use-index-cache python"), dict + capture_json_with_argv( + "conda search --json --override-channels -c defaults --use-index-cache python" + ), + dict, ) @pytest.mark.integration def test_search_5(self): self.assertIsInstance( - capture_json_with_argv("conda search --platform win-32 --json --override-channels -c defaults python"), dict + capture_json_with_argv( + "conda search --platform win-32 --json --override-channels -c defaults python" + ), + dict, ) @@ -250,7 +290,9 @@ def test_search_envs(): def test_run_returns_int(): prefix = make_temp_prefix(name="test") with make_temp_env(prefix=prefix): - stdout, stderr, result = run_inprocess_conda_command(f"conda run -p {prefix} echo hi") + stdout, stderr, result = run_inprocess_conda_command( + f"conda run -p {prefix} echo hi" + ) assert isinstance(result, int) @@ -258,7 +300,9 @@ def test_run_returns_int(): def test_run_returns_zero_errorlevel(): prefix = make_temp_prefix(name="test") with make_temp_env(prefix=prefix): - stdout, stderr, result = run_inprocess_conda_command(f"conda run -p {prefix} exit 0") + stdout, stderr, result = run_inprocess_conda_command( + f"conda run -p {prefix} exit 0" + ) assert result == 0 @@ -266,7 +310,9 @@ def 
test_run_returns_zero_errorlevel(): def test_run_returns_nonzero_errorlevel(): prefix = make_temp_prefix(name="test") with make_temp_env(prefix=prefix) as prefix: - stdout, stderr, result = run_inprocess_conda_command(f'conda run -p "{prefix}" exit 5') + stdout, stderr, result = run_inprocess_conda_command( + f'conda run -p "{prefix}" exit 5' + ) assert result == 5 @@ -312,7 +358,9 @@ def reset_permissions(): assert raise_ok - stdout, stderr, result = run_inprocess_conda_command(f"conda run -p {prefix} exit 0") + stdout, stderr, result = run_inprocess_conda_command( + f"conda run -p {prefix} exit 0" + ) # Reset permissions in case all goes according to plan reset_permissions() diff --git a/tests/test_create.py b/tests/test_create.py index 44329f461c0..f7c16043e17 100644 --- a/tests/test_create.py +++ b/tests/test_create.py @@ -1,26 +1,27 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - - +import json +import os +import re +import sys from glob import glob - -from conda.auxlib.compat import Utf8NamedTemporaryFile -from conda.gateways.disk.permissions import make_read_only -from conda.gateways.disk.create import compile_multiple_pyc -from conda.models.channel import Channel -from conda.resolve import Resolve - from itertools import chain -import json from json import loads as json_loads from logging import getLogger -import os -from os.path import abspath, basename, dirname, exists, isdir, isfile, join, lexists, relpath, islink -import re +from os.path import ( + abspath, + basename, + dirname, + exists, + isdir, + isfile, + islink, + join, + lexists, + relpath, +) from shutil import copyfile, rmtree -from subprocess import check_call, check_output, Popen, PIPE -import sys +from subprocess import PIPE, Popen, check_call, check_output from textwrap import dedent from unittest import TestCase from unittest.mock import Mock, patch @@ -29,64 +30,87 @@ import pytest import requests +from conda import CondaError, CondaMultiError +from conda.auxlib.compat import Utf8NamedTemporaryFile +from conda.auxlib.ish import dals +from conda.base.constants import ( + CONDA_PACKAGE_EXTENSIONS, + PREFIX_MAGIC_FILE, + SafetyChecks, +) +from conda.base.context import ( + Context, + conda_tests_ctxt_mgmt_def_pol, + context, + reset_context, +) +from conda.common.compat import ensure_text_type, on_mac, on_win +from conda.common.io import env_var, env_vars, stderr_log_level from conda.common.iterators import groupby_to_dict as groupby - -from conda import ( - CondaError, - CondaMultiError, +from conda.common.path import ( + get_bin_directory_short_path, + get_python_site_packages_short_path, + pyc_path, ) -from conda.auxlib.ish import dals -from conda.base.constants import CONDA_PACKAGE_EXTENSIONS, SafetyChecks, PREFIX_MAGIC_FILE -from conda.base.context import Context, context, reset_context, conda_tests_ctxt_mgmt_def_pol -from conda.common.compat import ensure_text_type, on_win, on_mac -from conda.common.io import env_var, stderr_log_level, env_vars -from conda.common.path import get_bin_directory_short_path, get_python_site_packages_short_path, \ - pyc_path -from conda.common.serialize import yaml_round_trip_load, json_dump +from conda.common.serialize import json_dump, yaml_round_trip_load from conda.core.index import get_reduced_index -from conda.core.prefix_data import PrefixData, get_python_version_for_prefix from conda.core.package_cache_data import PackageCacheData +from conda.core.prefix_data import PrefixData, get_python_version_for_prefix from conda.core.subdir_data 
import create_cache_dir -from conda.exceptions import ArgumentError, DryRunExit, OperationNotAllowed, \ - PackagesNotFoundError, RemoveError, PackageNotInstalledError, \ - DisallowedPackageError, DirectoryNotACondaEnvironmentError, EnvironmentLocationNotFound, \ - CondaValueError +from conda.exceptions import ( + ArgumentError, + CondaValueError, + DirectoryNotACondaEnvironmentError, + DisallowedPackageError, + DryRunExit, + EnvironmentLocationNotFound, + OperationNotAllowed, + PackageNotInstalledError, + PackagesNotFoundError, + RemoveError, +) from conda.gateways.anaconda_client import read_binstar_tokens -from conda.gateways.disk.delete import rm_rf, path_is_clean +from conda.gateways.disk.create import compile_multiple_pyc +from conda.gateways.disk.delete import path_is_clean, rm_rf +from conda.gateways.disk.permissions import make_read_only from conda.gateways.disk.update import touch -from conda.gateways.subprocess import subprocess_call, subprocess_call_with_clean_env, Response +from conda.gateways.subprocess import ( + Response, + subprocess_call, + subprocess_call_with_clean_env, +) +from conda.models.channel import Channel from conda.models.match_spec import MatchSpec from conda.models.version import VersionOrder - +from conda.resolve import Resolve from conda.testing.cases import BaseTestCase from conda.testing.integration import ( BIN_DIRECTORY, PYTHON_BINARY, TEST_LOG_LEVEL, + Commands, + cp_or_copy, create_temp_location, + env_or_set, get_shortcut_dir, make_temp_channel, + make_temp_env, make_temp_package_cache, make_temp_prefix, + package_is_installed, reload_config, run_command, - Commands, - package_is_installed, - make_temp_env, tempdir, which_or_where, - cp_or_copy, - env_or_set, ) log = getLogger(__name__) -stderr_log_level(TEST_LOG_LEVEL, 'conda') -stderr_log_level(TEST_LOG_LEVEL, 'requests') +stderr_log_level(TEST_LOG_LEVEL, "conda") +stderr_log_level(TEST_LOG_LEVEL, "requests") @pytest.mark.integration class IntegrationTests(BaseTestCase): - def setUp(self): PackageCacheData.clear() @@ -99,81 +123,114 @@ def test_install_python2_and_search(self): log.warning(f"Creating empty temporary environment txt file {env_txt}") environment_txt = env_txt.name - with patch('conda.core.envs_manager.get_user_environments_txt_file', - return_value=environment_txt) as _: + with patch( + "conda.core.envs_manager.get_user_environments_txt_file", + return_value=environment_txt, + ) as _: with make_temp_env("python=2", use_restricted_unicode=on_win) as prefix: - with env_var('CONDA_ALLOW_NON_CHANNEL_URLS', 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_ALLOW_NON_CHANNEL_URLS", + "true", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): assert exists(join(prefix, PYTHON_BINARY)) assert package_is_installed(prefix, "python=2") - run_command(Commands.CONFIG, prefix, "--add", "channels", "https://repo.continuum.io/pkgs/not-a-channel") + run_command( + Commands.CONFIG, + prefix, + "--add", + "channels", + "https://repo.continuum.io/pkgs/not-a-channel", + ) # regression test for #4513 - run_command(Commands.CONFIG, prefix, "--add", "channels", "https://repo.continuum.io/pkgs/not-a-channel") - stdout, stderr, _ = run_command(Commands.SEARCH, prefix, "python", "--json") + run_command( + Commands.CONFIG, + prefix, + "--add", + "channels", + "https://repo.continuum.io/pkgs/not-a-channel", + ) + stdout, stderr, _ = run_command( + Commands.SEARCH, prefix, "python", "--json" + ) packages = json.loads(stdout) assert len(packages) == 1 - stdout, stderr, _ = 
run_command(Commands.SEARCH, prefix, "python", "--json", "--envs") + stdout, stderr, _ = run_command( + Commands.SEARCH, prefix, "python", "--json", "--envs" + ) envs_result = json.loads(stdout) - assert any(match['location'] == prefix for match in envs_result) + assert any(match["location"] == prefix for match in envs_result) - stdout, stderr, _ = run_command(Commands.SEARCH, prefix, "python", "--envs") + stdout, stderr, _ = run_command( + Commands.SEARCH, prefix, "python", "--envs" + ) assert prefix in stdout os.unlink(environment_txt) def test_run_preserves_arguments(self): - with make_temp_env('python=3') as prefix: + with make_temp_env("python=3") as prefix: echo_args_py = os.path.join(prefix, "echo-args.py") with open(echo_args_py, "w") as echo_args: echo_args.write("import sys\n") echo_args.write("for arg in sys.argv[1:]: print(arg)\n") # If 'two two' were 'two' this test would pass. - args = ('one', 'two two', 'three') - output, _, _ = run_command(Commands.RUN, prefix, 'python', echo_args_py, *args) + args = ("one", "two two", "three") + output, _, _ = run_command( + Commands.RUN, prefix, "python", echo_args_py, *args + ) os.unlink(echo_args_py) - lines = output.split('\n') + lines = output.split("\n") for i, line in enumerate(lines): if i < len(args): - assert args[i] == line.replace('\r', '') + assert args[i] == line.replace("\r", "") def test_create_install_update_remove_smoketest(self): with make_temp_env("python=3.9") as prefix: assert exists(join(prefix, PYTHON_BINARY)) - assert package_is_installed(prefix, 'python=3') + assert package_is_installed(prefix, "python=3") - run_command(Commands.INSTALL, prefix, 'flask=2.0.1') - assert package_is_installed(prefix, 'flask=2.0.1') - assert package_is_installed(prefix, 'python=3') + run_command(Commands.INSTALL, prefix, "flask=2.0.1") + assert package_is_installed(prefix, "flask=2.0.1") + assert package_is_installed(prefix, "python=3") - run_command(Commands.INSTALL, prefix, '--force-reinstall', 'flask=2.0.1') - assert package_is_installed(prefix, 'flask=2.0.1') - assert package_is_installed(prefix, 'python=3') + run_command(Commands.INSTALL, prefix, "--force-reinstall", "flask=2.0.1") + assert package_is_installed(prefix, "flask=2.0.1") + assert package_is_installed(prefix, "python=3") - run_command(Commands.UPDATE, prefix, 'flask') - assert not package_is_installed(prefix, 'flask=2.0.1') - assert package_is_installed(prefix, 'flask') - assert package_is_installed(prefix, 'python=3') + run_command(Commands.UPDATE, prefix, "flask") + assert not package_is_installed(prefix, "flask=2.0.1") + assert package_is_installed(prefix, "flask") + assert package_is_installed(prefix, "python=3") - run_command(Commands.REMOVE, prefix, 'flask') - assert not package_is_installed(prefix, 'flask=0.*') - assert package_is_installed(prefix, 'python=3') + run_command(Commands.REMOVE, prefix, "flask") + assert not package_is_installed(prefix, "flask=0.*") + assert package_is_installed(prefix, "python=3") - stdout, stderr, _ = run_command(Commands.LIST, prefix, '--revisions') + stdout, stderr, _ = run_command(Commands.LIST, prefix, "--revisions") assert not stderr assert " (rev 4)\n" in stdout assert " (rev 5)\n" not in stdout - run_command(Commands.INSTALL, prefix, '--revision', '0') - assert not package_is_installed(prefix, 'flask') - assert package_is_installed(prefix, 'python=3') + run_command(Commands.INSTALL, prefix, "--revision", "0") + assert not package_is_installed(prefix, "flask") + assert package_is_installed(prefix, "python=3") def 
test_install_broken_post_install_keeps_existing_folders(self): # regression test for https://github.com/conda/conda/issues/8258 with make_temp_env("python=3.5") as prefix: assert exists(join(prefix, BIN_DIRECTORY)) - assert package_is_installed(prefix, 'python=3') + assert package_is_installed(prefix, "python=3") - run_command(Commands.INSTALL, prefix, '-c', 'conda-test', 'failing_post_link', use_exception_handler=True) + run_command( + Commands.INSTALL, + prefix, + "-c", + "conda-test", + "failing_post_link", + use_exception_handler=True, + ) assert exists(join(prefix, BIN_DIRECTORY)) def test_safety_checks(self): @@ -184,44 +241,52 @@ def test_safety_checks(self): # spaces in path names. with make_temp_env() as prefix: - with open(join(prefix, 'condarc'), 'a') as fh: + with open(join(prefix, "condarc"), "a") as fh: fh.write("safety_checks: enabled\n") fh.write("extra_safety_checks: true\n") reload_config(prefix) assert context.safety_checks is SafetyChecks.enabled with pytest.raises(CondaMultiError) as exc: - run_command(Commands.INSTALL, prefix, '-c', 'conda-test', 'spiffy-test-app=0.5') + run_command( + Commands.INSTALL, prefix, "-c", "conda-test", "spiffy-test-app=0.5" + ) error_message = str(exc.value) - message1 = dals(""" + message1 = dals( + """ The path 'site-packages/spiffy_test_app-1.0-py2.7.egg-info/top_level.txt' has an incorrect size. reported size: 32 bytes actual size: 16 bytes - """) + """ + ) message2 = dals("has a sha256 mismatch.") assert message1 in error_message assert message2 in error_message - with open(join(prefix, 'condarc'), 'w') as fh: + with open(join(prefix, "condarc"), "w") as fh: fh.write("safety_checks: warn\n") fh.write("extra_safety_checks: true\n") reload_config(prefix) assert context.safety_checks is SafetyChecks.warn - stdout, stderr, _ = run_command(Commands.INSTALL, prefix, '-c', 'conda-test', 'spiffy-test-app=0.5') + stdout, stderr, _ = run_command( + Commands.INSTALL, prefix, "-c", "conda-test", "spiffy-test-app=0.5" + ) assert message1 in stderr assert message2 in stderr assert package_is_installed(prefix, "spiffy-test-app=0.5") with make_temp_env() as prefix: - with open(join(prefix, 'condarc'), 'a') as fh: + with open(join(prefix, "condarc"), "a") as fh: fh.write("safety_checks: disabled\n") reload_config(prefix) assert context.safety_checks is SafetyChecks.disabled - stdout, stderr, _ = run_command(Commands.INSTALL, prefix, '-c', 'conda-test', 'spiffy-test-app=0.5') + stdout, stderr, _ = run_command( + Commands.INSTALL, prefix, "-c", "conda-test", "spiffy-test-app=0.5" + ) assert message1 not in stderr assert message2 not in stderr assert package_is_installed(prefix, "spiffy-test-app=0.5") @@ -232,7 +297,7 @@ def test_json_create_install_update_remove(self): def assert_json_parsable(content): string = None try: - for string in content and content.split('\0') or (): + for string in content and content.split("\0") or (): json.loads(string) except Exception as e: log.warn( @@ -240,79 +305,100 @@ def assert_json_parsable(content): " content: %s\n" " string: %s\n" " error: %r", - content, string, e + content, + string, + e, ) raise try: prefix = make_temp_prefix(str(uuid4())[:7]) - stdout, stderr, _ = run_command(Commands.CREATE, prefix, "python=3.8", "--json", "--dry-run", use_exception_handler=True) + stdout, stderr, _ = run_command( + Commands.CREATE, + prefix, + "python=3.8", + "--json", + "--dry-run", + use_exception_handler=True, + ) assert_json_parsable(stdout) # regression test for #5825 # contents of LINK and UNLINK is expected to have 
Dist format json_obj = json.loads(stdout) - dist_dump = json_obj['actions']['LINK'][0] - assert 'dist_name' in dist_dump + dist_dump = json_obj["actions"]["LINK"][0] + assert "dist_name" in dist_dump - stdout, stderr, _ = run_command(Commands.CREATE, prefix, "python=3.8", "--json") + stdout, stderr, _ = run_command( + Commands.CREATE, prefix, "python=3.8", "--json" + ) assert_json_parsable(stdout) assert not stderr json_obj = json.loads(stdout) - dist_dump = json_obj['actions']['LINK'][0] - assert 'dist_name' in dist_dump + dist_dump = json_obj["actions"]["LINK"][0] + assert "dist_name" in dist_dump - stdout, stderr, _ = run_command(Commands.INSTALL, prefix, 'flask=2.0.1', '--json') + stdout, stderr, _ = run_command( + Commands.INSTALL, prefix, "flask=2.0.1", "--json" + ) assert_json_parsable(stdout) assert not stderr - assert package_is_installed(prefix, 'flask=2.0.1') - assert package_is_installed(prefix, 'python=3') + assert package_is_installed(prefix, "flask=2.0.1") + assert package_is_installed(prefix, "python=3") # Test force reinstall - stdout, stderr, _ = run_command(Commands.INSTALL, prefix, '--force-reinstall', 'flask=2.0.1', '--json') + stdout, stderr, _ = run_command( + Commands.INSTALL, prefix, "--force-reinstall", "flask=2.0.1", "--json" + ) assert_json_parsable(stdout) assert not stderr - assert package_is_installed(prefix, 'flask=2.0.1') - assert package_is_installed(prefix, 'python=3') + assert package_is_installed(prefix, "flask=2.0.1") + assert package_is_installed(prefix, "python=3") - stdout, stderr, _ = run_command(Commands.UPDATE, prefix, 'flask', '--json') + stdout, stderr, _ = run_command(Commands.UPDATE, prefix, "flask", "--json") assert_json_parsable(stdout) assert not stderr - assert not package_is_installed(prefix, 'flask=2.0.1') - assert package_is_installed(prefix, 'flask') - assert package_is_installed(prefix, 'python=3') + assert not package_is_installed(prefix, "flask=2.0.1") + assert package_is_installed(prefix, "flask") + assert package_is_installed(prefix, "python=3") - stdout, stderr, _ = run_command(Commands.REMOVE, prefix, 'flask', '--json') + stdout, stderr, _ = run_command(Commands.REMOVE, prefix, "flask", "--json") assert_json_parsable(stdout) assert not stderr - assert not package_is_installed(prefix, 'flask=2.*') - assert package_is_installed(prefix, 'python=3') + assert not package_is_installed(prefix, "flask=2.*") + assert package_is_installed(prefix, "python=3") # regression test for #5825 # contents of LINK and UNLINK is expected to have Dist format json_obj = json.loads(stdout) - dist_dump = json_obj['actions']['UNLINK'][0] - assert 'dist_name' in dist_dump + dist_dump = json_obj["actions"]["UNLINK"][0] + assert "dist_name" in dist_dump - stdout, stderr, _ = run_command(Commands.LIST, prefix, '--revisions', '--json') + stdout, stderr, _ = run_command( + Commands.LIST, prefix, "--revisions", "--json" + ) assert not stderr json_obj = json.loads(stdout) assert len(json_obj) == 5 assert json_obj[4]["rev"] == 4 - stdout, stderr, _ = run_command(Commands.INSTALL, prefix, '--revision', '0', '--json') + stdout, stderr, _ = run_command( + Commands.INSTALL, prefix, "--revision", "0", "--json" + ) assert_json_parsable(stdout) assert not stderr - assert not package_is_installed(prefix, 'flask') - assert package_is_installed(prefix, 'python=3') + assert not package_is_installed(prefix, "flask") + assert package_is_installed(prefix, "python=3") finally: rmtree(prefix, ignore_errors=True) def 
test_not_writable_env_raises_EnvironmentNotWritableError(self): with make_temp_env() as prefix: make_read_only(join(prefix, PREFIX_MAGIC_FILE)) - stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "openssl", use_exception_handler=True) + stdout, stderr, _ = run_command( + Commands.INSTALL, prefix, "openssl", use_exception_handler=True + ) assert "EnvironmentNotWritableError" in stderr assert prefix in stderr @@ -323,7 +409,9 @@ def test_conda_update_package_not_installed(self): with pytest.raises(CondaError) as conda_error: run_command(Commands.UPDATE, prefix, "conda-forge::*") - assert conda_error.value.message.startswith("Invalid spec for 'conda update'") + assert conda_error.value.message.startswith( + "Invalid spec for 'conda update'" + ) def test_noarch_python_package_with_entry_points(self): # this channel has an ancient flask that is incompatible with jinja2>=3.1.0 @@ -331,10 +419,10 @@ def test_noarch_python_package_with_entry_points(self): py_ver = get_python_version_for_prefix(prefix) sp_dir = get_python_site_packages_short_path(py_ver) py_file = sp_dir + "/flask/__init__.py" - pyc_file = pyc_path(py_file, py_ver).replace('/', os.sep) + pyc_file = pyc_path(py_file, py_ver).replace("/", os.sep) assert isfile(join(prefix, py_file)) assert isfile(join(prefix, pyc_file)) - exe_path = join(prefix, get_bin_directory_short_path(), 'flask') + exe_path = join(prefix, get_bin_directory_short_path(), "flask") if on_win: exe_path += ".exe" assert isfile(exe_path) @@ -353,7 +441,7 @@ def test_noarch_python_package_without_entry_points(self): py_ver = get_python_version_for_prefix(prefix) sp_dir = get_python_site_packages_short_path(py_ver) py_file = sp_dir + "/itsdangerous.py" - pyc_file = pyc_path(py_file, py_ver).replace('/', os.sep) + pyc_file = pyc_path(py_file, py_ver).replace("/", os.sep) assert isfile(join(prefix, py_file)) assert isfile(join(prefix, pyc_file)) @@ -363,34 +451,46 @@ def test_noarch_python_package_without_entry_points(self): assert not isfile(join(prefix, pyc_file)) def test_noarch_python_package_reinstall_on_pyver_change(self): - with make_temp_env("-c", "conda-test", "itsdangerous=0.24", "python=3", use_restricted_unicode=on_win) as prefix: + with make_temp_env( + "-c", + "conda-test", + "itsdangerous=0.24", + "python=3", + use_restricted_unicode=on_win, + ) as prefix: py_ver = get_python_version_for_prefix(prefix) - assert py_ver.startswith('3') + assert py_ver.startswith("3") sp_dir = get_python_site_packages_short_path(py_ver) py_file = sp_dir + "/itsdangerous.py" - pyc_file_py3 = pyc_path(py_file, py_ver).replace('/', os.sep) + pyc_file_py3 = pyc_path(py_file, py_ver).replace("/", os.sep) assert isfile(join(prefix, py_file)) assert isfile(join(prefix, pyc_file_py3)) run_command(Commands.INSTALL, prefix, "python=2") - assert not isfile(join(prefix, pyc_file_py3)) # python3 pyc file should be gone + assert not isfile( + join(prefix, pyc_file_py3) + ) # python3 pyc file should be gone py_ver = get_python_version_for_prefix(prefix) - assert py_ver.startswith('2') + assert py_ver.startswith("2") sp_dir = get_python_site_packages_short_path(py_ver) py_file = sp_dir + "/itsdangerous.py" - pyc_file_py2 = pyc_path(py_file, py_ver).replace('/', os.sep) + pyc_file_py2 = pyc_path(py_file, py_ver).replace("/", os.sep) assert isfile(join(prefix, py_file)) assert isfile(join(prefix, pyc_file_py2)) def test_noarch_generic_package(self): with make_temp_env("-c", "conda-test", "font-ttf-inconsolata") as prefix: - assert isfile(join(prefix, 'fonts', 
'Inconsolata-Regular.ttf')) + assert isfile(join(prefix, "fonts", "Inconsolata-Regular.ttf")) def test_override_channels(self): with pytest.raises(OperationNotAllowed): - with env_var('CONDA_OVERRIDE_CHANNELS_ENABLED', 'no', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_OVERRIDE_CHANNELS_ENABLED", + "no", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with make_temp_env("--override-channels", "python") as prefix: assert prefix @@ -398,39 +498,58 @@ def test_override_channels(self): with make_temp_env("--override-channels", "python") as prefix: assert prefix - stdout, stderr, _ = run_command(Commands.SEARCH, None, "--override-channels", "-c", "conda-test", "flask", "--json") + stdout, stderr, _ = run_command( + Commands.SEARCH, + None, + "--override-channels", + "-c", + "conda-test", + "flask", + "--json", + ) assert not stderr assert len(json.loads(stdout)["flask"]) < 3 assert json.loads(stdout)["flask"][0]["noarch"] == "python" def test_create_empty_env(self): with make_temp_env() as prefix: - assert exists(join(prefix, 'conda-meta/history')) + assert exists(join(prefix, "conda-meta/history")) list_output = run_command(Commands.LIST, prefix) stdout = list_output[0] stderr = list_output[1] - expected_output = """# packages in environment at %s: + expected_output = ( + """# packages in environment at %s: # # Name Version Build Channel -""" % prefix +""" + % prefix + ) self.assertEqual(stdout, expected_output) - self.assertEqual(stderr, '') + self.assertEqual(stderr, "") - revision_output = run_command(Commands.LIST, prefix, '--revisions') + revision_output = run_command(Commands.LIST, prefix, "--revisions") stdout = revision_output[0] stderr = revision_output[1] - assert stderr == '' + assert stderr == "" self.assertIsInstance(stdout, str) @pytest.mark.skipif(reason="conda-forge doesn't have a full set of packages") def test_strict_channel_priority(self): with make_temp_env() as prefix: stdout, stderr, rc = run_command( - Commands.CREATE, prefix, - "-c", "conda-forge", "-c", "defaults", "python=3.6", "quaternion", - "--strict-channel-priority", "--dry-run", "--json", - use_exception_handler=True + Commands.CREATE, + prefix, + "-c", + "conda-forge", + "-c", + "defaults", + "python=3.6", + "quaternion", + "--strict-channel-priority", + "--dry-run", + "--json", + use_exception_handler=True, ) assert not rc json_obj = json_loads(stdout) @@ -439,48 +558,69 @@ def test_strict_channel_priority(self): # Rather than spending more time looking for another package, just filter it out. # Same thing for Windows, this is because we use MKL always. Perhaps there's a # way to exclude it, I tried the "nomkl" package but that did not work. 
- json_obj["actions"]["LINK"] = [link for link in json_obj["actions"]["LINK"] - if link['name'] not in ('libcxx', 'libcxxabi', 'mkl', 'intel-openmp')] - channel_groups = groupby(lambda x: x["channel"], json_obj["actions"]["LINK"]) + json_obj["actions"]["LINK"] = [ + link + for link in json_obj["actions"]["LINK"] + if link["name"] not in ("libcxx", "libcxxabi", "mkl", "intel-openmp") + ] + channel_groups = groupby( + lambda x: x["channel"], json_obj["actions"]["LINK"] + ) channel_groups = sorted(list(channel_groups)) - assert channel_groups == ["conda-forge",] + assert channel_groups == [ + "conda-forge", + ] def test_strict_resolve_get_reduced_index(self): channels = (Channel("defaults"),) specs = (MatchSpec("anaconda"),) - index = get_reduced_index(None, channels, context.subdirs, specs, 'repodata.json') + index = get_reduced_index( + None, channels, context.subdirs, specs, "repodata.json" + ) r = Resolve(index, channels=channels) - with env_var("CONDA_CHANNEL_PRIORITY", "strict", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CHANNEL_PRIORITY", + "strict", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): reduced_index = r.get_reduced_index(specs) channel_name_groups = { name: {prec.channel.name for prec in group} for name, group in groupby(lambda x: x["name"], reduced_index).items() } channel_name_groups = { - name: channel_names for name, channel_names in channel_name_groups.items() + name: channel_names + for name, channel_names in channel_name_groups.items() if len(channel_names) > 1 } assert {} == channel_name_groups def test_list_with_pip_no_binary(self): from conda.exports import rm_rf as _rm_rf + # For this test to work on Windows, you can either pass use_restricted_unicode=on_win # to make_temp_env(), or you can set PYTHONUTF8 to 1 (and use Python 3.7 or above). # We elect to test the more complex of the two options. py_ver = "3.10" - with make_temp_env("python="+py_ver, "pip") as prefix: + with make_temp_env("python=" + py_ver, "pip") as prefix: evs = {"PYTHONUTF8": "1"} # This test does not activate the env. if on_win: - evs['CONDA_DLL_SEARCH_MODIFICATION_ENABLE'] = '1' + evs["CONDA_DLL_SEARCH_MODIFICATION_ENABLE"] = "1" with env_vars(evs, stack_callback=conda_tests_ctxt_mgmt_def_pol): - check_call(PYTHON_BINARY + " -m pip install --no-binary flask flask==1.0.2", - cwd=prefix, shell=True) + check_call( + PYTHON_BINARY + " -m pip install --no-binary flask flask==1.0.2", + cwd=prefix, + shell=True, + ) PrefixData._cache_.clear() stdout, stderr, _ = run_command(Commands.LIST, prefix) - stdout_lines = stdout.split('\n') - assert any(line.endswith("pypi") for line in stdout_lines - if line.lower().startswith("flask")) + stdout_lines = stdout.split("\n") + assert any( + line.endswith("pypi") + for line in stdout_lines + if line.lower().startswith("flask") + ) # regression test for #5847 # when using rm_rf on a directory @@ -492,11 +632,11 @@ def test_list_with_pip_wheel(self): from conda.exports import rm_rf as _rm_rf py_ver = "3.10" - with make_temp_env("python="+py_ver, "pip") as prefix: + with make_temp_env("python=" + py_ver, "pip") as prefix: evs = {"PYTHONUTF8": "1"} # This test does not activate the env. 
if on_win: - evs['CONDA_DLL_SEARCH_MODIFICATION_ENABLE'] = '1' + evs["CONDA_DLL_SEARCH_MODIFICATION_ENABLE"] = "1" with env_vars(evs, stack_callback=conda_tests_ctxt_mgmt_def_pol): check_call( PYTHON_BINARY + " -m pip install flask==1.0.2", @@ -505,9 +645,12 @@ def test_list_with_pip_wheel(self): ) PrefixData._cache_.clear() stdout, stderr, _ = run_command(Commands.LIST, prefix) - stdout_lines = stdout.split('\n') - assert any(line.endswith("pypi") for line in stdout_lines - if line.lower().startswith("flask")) + stdout_lines = stdout.split("\n") + assert any( + line.endswith("pypi") + for line in stdout_lines + if line.lower().startswith("flask") + ) # regression test for #3433 run_command(Commands.INSTALL, prefix, "python=3.9", no_capture=True) @@ -516,7 +659,9 @@ def test_list_with_pip_wheel(self): # regression test for #5847 # when using rm_rf on a file assert prefix in PrefixData._cache_ - _rm_rf(join(prefix, get_python_site_packages_short_path("3.9")), "os.py") + _rm_rf( + join(prefix, get_python_site_packages_short_path("3.9")), "os.py" + ) assert prefix not in PrefixData._cache_ # regression test for #5980, related to #5847 @@ -534,41 +679,46 @@ def test_list_with_pip_wheel(self): def test_compare_success(self): with make_temp_env("python=3.6", "flask=1.0.2", "bzip2=1.0.8") as prefix: - env_file = join(prefix, 'env.yml') + env_file = join(prefix, "env.yml") touch(env_file) with open(env_file, "w") as f: f.write( -"""name: dummy + """name: dummy channels: - defaults dependencies: - bzip2=1.0.8 - - flask>=1.0.1,<=1.0.4""") + - flask>=1.0.1,<=1.0.4""" + ) output, _, _ = run_command(Commands.COMPARE, prefix, env_file, "--json") assert "Success" in output rmtree(prefix, ignore_errors=True) def test_compare_fail(self): with make_temp_env("python=3.6", "flask=1.0.2", "bzip2=1.0.8") as prefix: - env_file = join(prefix, 'env.yml') + env_file = join(prefix, "env.yml") touch(env_file) with open(env_file, "w") as f: f.write( -"""name: dummy + """name: dummy channels: - defaults dependencies: - yaml - - flask=1.0.3""") + - flask=1.0.3""" + ) output, _, _ = run_command(Commands.COMPARE, prefix, env_file, "--json") assert "yaml not found" in output - assert "flask found but mismatch. Specification pkg: flask=1.0.3, Running pkg: flask==1.0.2=py36_1" in output + assert ( + "flask found but mismatch. 
Specification pkg: flask=1.0.3, Running pkg: flask==1.0.2=py36_1" + in output + ) rmtree(prefix, ignore_errors=True) def test_install_tarball_from_local_channel(self): # Regression test for #2812 # install from local channel - ''' + """ path = u'/private/var/folders/y1/ljv50nrs49gdqkrp01wy3_qm0000gn/T/pytest-of-rdonnelly/pytest-16/test_install_tarball_from_loca0/c352_çñßôêá' if on_win: path = u'C:\\çñ' @@ -583,41 +733,51 @@ def test_install_tarball_from_local_channel(self): assert path == path2 assert type(path) == type(path2) # path_to_url("c:\\users\\est_install_tarball_from_loca0\a48a_6f154a82dbe3c7") - ''' + """ with make_temp_env() as prefix, make_temp_channel(["flask-2.1.3"]) as channel: - run_command(Commands.INSTALL, prefix, '-c', channel, 'flask=2.1.3', '--json') - assert package_is_installed(prefix, channel + '::' + 'flask') - flask_fname = [p for p in PrefixData(prefix).iter_records() if p['name'] == 'flask'][0]['fn'] + run_command( + Commands.INSTALL, prefix, "-c", channel, "flask=2.1.3", "--json" + ) + assert package_is_installed(prefix, channel + "::" + "flask") + flask_fname = [ + p for p in PrefixData(prefix).iter_records() if p["name"] == "flask" + ][0]["fn"] - run_command(Commands.REMOVE, prefix, 'flask') - assert not package_is_installed(prefix, 'flask=0') + run_command(Commands.REMOVE, prefix, "flask") + assert not package_is_installed(prefix, "flask=0") # Regression test for 2970 # install from build channel as a tarball tar_path = join(PackageCacheData.first_writable().pkgs_dir, flask_fname) if not os.path.isfile(tar_path): - tar_path = tar_path.replace('.conda', '.tar.bz2') - conda_bld = join(dirname(PackageCacheData.first_writable().pkgs_dir), 'conda-bld') + tar_path = tar_path.replace(".conda", ".tar.bz2") + conda_bld = join( + dirname(PackageCacheData.first_writable().pkgs_dir), "conda-bld" + ) conda_bld_sub = join(conda_bld, context.subdir) if not isdir(conda_bld_sub): os.makedirs(conda_bld_sub) tar_bld_path = join(conda_bld_sub, basename(tar_path)) copyfile(tar_path, tar_bld_path) run_command(Commands.INSTALL, prefix, tar_bld_path) - assert package_is_installed(prefix, 'flask') + assert package_is_installed(prefix, "flask") # Regression test for #462 with make_temp_env(tar_bld_path) as prefix2: - assert package_is_installed(prefix2, 'flask') + assert package_is_installed(prefix2, "flask") def test_tarball_install(self): - with make_temp_env('bzip2') as prefix: + with make_temp_env("bzip2") as prefix: # We have a problem. If bzip2 is extracted already but the tarball is missing then this fails. - bzip2_data = [p for p in PrefixData(prefix).iter_records() if p['name'] == 'bzip2'][0] - bzip2_fname = bzip2_data['fn'] + bzip2_data = [ + p for p in PrefixData(prefix).iter_records() if p["name"] == "bzip2" + ][0] + bzip2_fname = bzip2_data["fn"] tar_old_path = join(PackageCacheData.first_writable().pkgs_dir, bzip2_fname) if not isfile(tar_old_path): - log.warning("Installing bzip2 failed to save the compressed package, downloading it 'manually' ..") + log.warning( + "Installing bzip2 failed to save the compressed package, downloading it 'manually' .." + ) # Downloading to the package cache causes some internal inconsistency here: # # File "/Users/rdonnelly/conda/conda/conda/common/path.py", line 72, in url_to_path @@ -627,8 +787,15 @@ def test_tarball_install(self): # .. so download to the root of the prefix instead. 
tar_old_path = join(prefix, bzip2_fname) from conda.gateways.connection.download import download - download('https://repo.anaconda.com/pkgs/main/' + bzip2_data.subdir + '/' + bzip2_fname, - tar_old_path, None) + + download( + "https://repo.anaconda.com/pkgs/main/" + + bzip2_data.subdir + + "/" + + bzip2_fname, + tar_old_path, + None, + ) assert isfile(tar_old_path), f"Failed to cache:\n{tar_old_path}" # It would be nice to be able to do this, but the cache folder name comes from # the file name and that is then all out of whack with the metadata. @@ -636,79 +803,88 @@ def test_tarball_install(self): tar_new_path = join(prefix, bzip2_fname) run_command(Commands.RUN, prefix, cp_or_copy, tar_old_path, tar_new_path) - assert isfile(tar_new_path), f"Failed to copy:\n{tar_old_path}\nto:\n{tar_new_path}" + assert isfile( + tar_new_path + ), f"Failed to copy:\n{tar_old_path}\nto:\n{tar_new_path}" run_command(Commands.INSTALL, prefix, tar_new_path) - assert package_is_installed(prefix, 'bzip2') + assert package_is_installed(prefix, "bzip2") def test_tarball_install_and_bad_metadata(self): with make_temp_env("python=3.10.9", "flask=1.1.1", "--json") as prefix: - assert package_is_installed(prefix, 'flask==1.1.1') - flask_data = [p for p in PrefixData(prefix).iter_records() if p['name'] == 'flask'][0] - run_command(Commands.REMOVE, prefix, 'flask') - assert not package_is_installed(prefix, 'flask==1.1.1') - assert package_is_installed(prefix, 'python') + assert package_is_installed(prefix, "flask==1.1.1") + flask_data = [ + p for p in PrefixData(prefix).iter_records() if p["name"] == "flask" + ][0] + run_command(Commands.REMOVE, prefix, "flask") + assert not package_is_installed(prefix, "flask==1.1.1") + assert package_is_installed(prefix, "python") - flask_fname = flask_data['fn'] + flask_fname = flask_data["fn"] tar_old_path = join(PackageCacheData.first_writable().pkgs_dir, flask_fname) # if a .tar.bz2 is already in the file cache, it's fine. Accept it or the .conda file here. 
if not isfile(tar_old_path): - tar_old_path = tar_old_path.replace('.conda', '.tar.bz2') + tar_old_path = tar_old_path.replace(".conda", ".tar.bz2") assert isfile(tar_old_path) with pytest.raises(DryRunExit): run_command(Commands.INSTALL, prefix, tar_old_path, "--dry-run") - assert not package_is_installed(prefix, 'flask=1.*') + assert not package_is_installed(prefix, "flask=1.*") # regression test for #2886 (part 1 of 2) # install tarball from package cache, default channel run_command(Commands.INSTALL, prefix, tar_old_path) - assert package_is_installed(prefix, 'flask=1.*') + assert package_is_installed(prefix, "flask=1.*") # regression test for #2626 # install tarball with full path, outside channel tar_new_path = join(prefix, flask_fname) copyfile(tar_old_path, tar_new_path) run_command(Commands.INSTALL, prefix, tar_new_path) - assert package_is_installed(prefix, 'flask=1') + assert package_is_installed(prefix, "flask=1") # regression test for #2626 # install tarball with relative path, outside channel - run_command(Commands.REMOVE, prefix, 'flask') - assert not package_is_installed(prefix, 'flask=1.1.1') + run_command(Commands.REMOVE, prefix, "flask") + assert not package_is_installed(prefix, "flask=1.1.1") tar_new_path = relpath(tar_new_path) run_command(Commands.INSTALL, prefix, tar_new_path) - assert package_is_installed(prefix, 'flask=1') + assert package_is_installed(prefix, "flask=1") # regression test for #2886 (part 2 of 2) # install tarball from package cache, local channel - run_command(Commands.REMOVE, prefix, 'flask', '--json') - assert not package_is_installed(prefix, 'flask=1') + run_command(Commands.REMOVE, prefix, "flask", "--json") + assert not package_is_installed(prefix, "flask=1") run_command(Commands.INSTALL, prefix, tar_old_path) # The last install was from the `local::` channel - assert package_is_installed(prefix, 'flask') + assert package_is_installed(prefix, "flask") # regression test for #2599 # ignore json files in conda-meta that don't conform to name-version-build.json if not on_win: # xz is only a python dependency on unix xz_prec = next(PrefixData(prefix).query("xz")) - dist_name = xz_prec.dist_str().split('::')[-1] - xz_prefix_data_json_path = join(prefix, 'conda-meta', dist_name + '.json') - copyfile(xz_prefix_data_json_path, - join(prefix, 'conda-meta', 'xz.json')) + dist_name = xz_prec.dist_str().split("::")[-1] + xz_prefix_data_json_path = join( + prefix, "conda-meta", dist_name + ".json" + ) + copyfile( + xz_prefix_data_json_path, join(prefix, "conda-meta", "xz.json") + ) rm_rf(xz_prefix_data_json_path) assert not lexists(xz_prefix_data_json_path) PrefixData._cache_ = {} - assert not package_is_installed(prefix, 'xz') + assert not package_is_installed(prefix, "xz") @pytest.mark.skipif(on_win, reason="windows python doesn't depend on readline") def test_update_with_pinned_packages(self): # regression test for #6914 - with make_temp_env("-c", "https://repo.anaconda.com/pkgs/free", "python=2.7.12") as prefix: + with make_temp_env( + "-c", "https://repo.anaconda.com/pkgs/free", "python=2.7.12" + ) as prefix: assert package_is_installed(prefix, "readline=6.2") # removing the history allows python to be updated too - open(join(prefix, 'conda-meta', 'history'), 'w').close() + open(join(prefix, "conda-meta", "history"), "w").close() PrefixData._cache_.clear() run_command(Commands.UPDATE, prefix, "readline", no_capture=True) assert package_is_installed(prefix, "readline") @@ -718,145 +894,172 @@ def test_update_with_pinned_packages(self): def 
test_pinned_override_with_explicit_spec(self): with make_temp_env("python=3.9") as prefix: - run_command(Commands.CONFIG, prefix, - "--add", "pinned_packages", "python=3.9.16") + run_command( + Commands.CONFIG, prefix, "--add", "pinned_packages", "python=3.9.16" + ) run_command(Commands.INSTALL, prefix, "python=3.10", no_capture=True) assert package_is_installed(prefix, "python=3.10") def test_remove_all(self): with make_temp_env("python") as prefix: assert exists(join(prefix, PYTHON_BINARY)) - assert package_is_installed(prefix, 'python') + assert package_is_installed(prefix, "python") # regression test for #2154 with pytest.raises(PackagesNotFoundError) as exc: - run_command(Commands.REMOVE, prefix, 'python', 'foo', 'numpy') + run_command(Commands.REMOVE, prefix, "python", "foo", "numpy") exception_string = repr(exc.value) assert "PackagesNotFoundError" in exception_string assert "- numpy" in exception_string assert "- foo" in exception_string - run_command(Commands.REMOVE, prefix, '--all') + run_command(Commands.REMOVE, prefix, "--all") assert path_is_clean(prefix) - @pytest.mark.skipif(on_win, reason="windows usually doesn't support symlinks out-of-the box") - @patch('conda.core.link.hardlink_supported', side_effect=lambda x, y: False) + @pytest.mark.skipif( + on_win, reason="windows usually doesn't support symlinks out-of-the box" + ) + @patch("conda.core.link.hardlink_supported", side_effect=lambda x, y: False) def test_allow_softlinks(self, hardlink_supported_mock): hardlink_supported_mock._result_cache.clear() - with env_var("CONDA_ALLOW_SOFTLINKS", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_ALLOW_SOFTLINKS", + "true", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with make_temp_env("pip") as prefix: - assert islink(join(prefix, get_python_site_packages_short_path( - get_python_version_for_prefix(prefix)), 'pip', '__init__.py')) + assert islink( + join( + prefix, + get_python_site_packages_short_path( + get_python_version_for_prefix(prefix) + ), + "pip", + "__init__.py", + ) + ) hardlink_supported_mock._result_cache.clear() @pytest.mark.skipif(on_win, reason="nomkl not present on windows") def test_remove_features(self): with make_temp_env("python=2", "numpy=1.13", "nomkl") as prefix: assert exists(join(prefix, PYTHON_BINARY)) - assert package_is_installed(prefix, 'numpy') - assert package_is_installed(prefix, 'nomkl') - assert not package_is_installed(prefix, 'mkl') + assert package_is_installed(prefix, "numpy") + assert package_is_installed(prefix, "nomkl") + assert not package_is_installed(prefix, "mkl") # A consequence of discontinuing use of the 'features' key and instead # using direct dependencies is that removing the feature means that # packages associated with the track_features base package are completely removed # and not replaced with equivalent non-variant packages as before. 
- run_command(Commands.REMOVE, prefix, '--features', 'nomkl') + run_command(Commands.REMOVE, prefix, "--features", "nomkl") # assert package_is_installed(prefix, 'numpy') # removed per above comment - assert not package_is_installed(prefix, 'nomkl') + assert not package_is_installed(prefix, "nomkl") # assert package_is_installed(prefix, 'mkl') # removed per above comment - @pytest.mark.skipif(on_win and context.bits == 32, reason="no 32-bit windows python on conda-forge") + @pytest.mark.skipif( + on_win and context.bits == 32, reason="no 32-bit windows python on conda-forge" + ) @pytest.mark.flaky(reruns=2) def test_dash_c_usage_replacing_python(self): # Regression test for #2606 - with make_temp_env("-c", "conda-forge", "python=3.10", no_capture=True) as prefix: + with make_temp_env( + "-c", "conda-forge", "python=3.10", no_capture=True + ) as prefix: assert exists(join(prefix, PYTHON_BINARY)) - assert package_is_installed(prefix, 'conda-forge::python=3.10') + assert package_is_installed(prefix, "conda-forge::python=3.10") run_command(Commands.INSTALL, prefix, "decorator") - assert package_is_installed(prefix, 'conda-forge::python=3.10') + assert package_is_installed(prefix, "conda-forge::python=3.10") - with make_temp_env('--clone', prefix) as clone_prefix: - assert package_is_installed(clone_prefix, 'conda-forge::python=3.10') + with make_temp_env("--clone", prefix) as clone_prefix: + assert package_is_installed(clone_prefix, "conda-forge::python=3.10") assert package_is_installed(clone_prefix, "decorator") # Regression test for #2645 - fn = glob(join(prefix, 'conda-meta', 'python-3.10*.json'))[-1] + fn = glob(join(prefix, "conda-meta", "python-3.10*.json"))[-1] with open(fn) as f: data = json.load(f) - for field in ('url', 'channel', 'schannel'): + for field in ("url", "channel", "schannel"): if field in data: del data[field] - with open(fn, 'w') as f: + with open(fn, "w") as f: json.dump(data, f) PrefixData._cache_ = {} - with make_temp_env('-c', 'conda-forge', '--clone', prefix) as clone_prefix: - assert package_is_installed(clone_prefix, 'python=3.10') - assert package_is_installed(clone_prefix, 'decorator') + with make_temp_env("-c", "conda-forge", "--clone", prefix) as clone_prefix: + assert package_is_installed(clone_prefix, "python=3.10") + assert package_is_installed(clone_prefix, "decorator") def test_install_prune_flag(self): with make_temp_env("python=3", "flask") as prefix: - assert package_is_installed(prefix, 'flask') - assert package_is_installed(prefix, 'python=3') + assert package_is_installed(prefix, "flask") + assert package_is_installed(prefix, "python=3") run_command(Commands.REMOVE, prefix, "flask") - assert not package_is_installed(prefix, 'flask') + assert not package_is_installed(prefix, "flask") # this should get pruned when flask is removed - assert not package_is_installed(prefix, 'itsdangerous') - assert package_is_installed(prefix, 'python=3') + assert not package_is_installed(prefix, "itsdangerous") + assert package_is_installed(prefix, "python=3") @pytest.mark.skipif(on_win, reason="readline is only a python dependency on unix") def test_remove_force_remove_flag(self): with make_temp_env("python") as prefix: - assert package_is_installed(prefix, 'readline') - assert package_is_installed(prefix, 'python') + assert package_is_installed(prefix, "readline") + assert package_is_installed(prefix, "python") - run_command(Commands.REMOVE, prefix, 'readline', '--force-remove') - assert not package_is_installed(prefix, 'readline') - assert 
package_is_installed(prefix, 'python') + run_command(Commands.REMOVE, prefix, "readline", "--force-remove") + assert not package_is_installed(prefix, "readline") + assert package_is_installed(prefix, "python") def test_install_force_reinstall_flag(self): with make_temp_env("python") as prefix: - stdout, stderr, _ = run_command(Commands.INSTALL, prefix, - "--json", "--dry-run", "--force-reinstall", "python", - use_exception_handler=True) + stdout, stderr, _ = run_command( + Commands.INSTALL, + prefix, + "--json", + "--dry-run", + "--force-reinstall", + "python", + use_exception_handler=True, + ) output_obj = json.loads(stdout.strip()) - unlink_actions = output_obj['actions']['UNLINK'] - link_actions = output_obj['actions']['LINK'] + unlink_actions = output_obj["actions"]["UNLINK"] + link_actions = output_obj["actions"]["LINK"] assert len(unlink_actions) == len(link_actions) == 1 assert unlink_actions[0] == link_actions[0] - assert unlink_actions[0]['name'] == 'python' + assert unlink_actions[0]["name"] == "python" def test_create_no_deps_flag(self): with make_temp_env("python=2", "flask", "--no-deps") as prefix: - assert package_is_installed(prefix, 'flask') - assert package_is_installed(prefix, 'python=2') - assert not package_is_installed(prefix, 'openssl') - assert not package_is_installed(prefix, 'itsdangerous') + assert package_is_installed(prefix, "flask") + assert package_is_installed(prefix, "python=2") + assert not package_is_installed(prefix, "openssl") + assert not package_is_installed(prefix, "itsdangerous") def test_create_only_deps_flag(self): - with make_temp_env("python=2", "flask", "--only-deps", no_capture=True) as prefix: - assert not package_is_installed(prefix, 'flask') - assert package_is_installed(prefix, 'python') + with make_temp_env( + "python=2", "flask", "--only-deps", no_capture=True + ) as prefix: + assert not package_is_installed(prefix, "flask") + assert package_is_installed(prefix, "python") if not on_win: # sqlite is a dependency of Python on all platforms - assert package_is_installed(prefix, 'sqlite') - assert package_is_installed(prefix, 'itsdangerous') + assert package_is_installed(prefix, "sqlite") + assert package_is_installed(prefix, "itsdangerous") # test that a later install keeps the --only-deps packages around run_command(Commands.INSTALL, prefix, "imagesize", no_capture=True) - assert package_is_installed(prefix, 'itsdangerous') - assert not package_is_installed(prefix, 'flask') + assert package_is_installed(prefix, "itsdangerous") + assert not package_is_installed(prefix, "flask") # test that --only-deps installed stuff survives updates of unrelated packages run_command(Commands.UPDATE, prefix, "imagesize", no_capture=True) - assert package_is_installed(prefix, 'itsdangerous') - assert not package_is_installed(prefix, 'flask') + assert package_is_installed(prefix, "itsdangerous") + assert not package_is_installed(prefix, "flask") # test that --only-deps installed stuff survives removal of unrelated packages run_command(Commands.REMOVE, prefix, "imagesize", no_capture=True) - assert package_is_installed(prefix, 'itsdangerous') - assert not package_is_installed(prefix, 'flask') + assert package_is_installed(prefix, "itsdangerous") + assert not package_is_installed(prefix, "flask") def test_install_update_deps_flag(self): with make_temp_env("flask=2.0.1", "jinja2=3.0.1") as prefix: @@ -904,11 +1107,11 @@ def test_install_update_deps_only_deps_flags(self): assert package_is_installed(prefix, "flask=2.0.1") assert package_is_installed(prefix, 
"jinja2>3.0.1") - - @pytest.mark.xfail(on_win, reason="nomkl not present on windows", - strict=True) + @pytest.mark.xfail(on_win, reason="nomkl not present on windows", strict=True) def test_install_features(self): - with make_temp_env("python=2", "numpy=1.13", "nomkl", no_capture=True) as prefix: + with make_temp_env( + "python=2", "numpy=1.13", "nomkl", no_capture=True + ) as prefix: assert package_is_installed(prefix, "numpy") assert package_is_installed(prefix, "nomkl") assert not package_is_installed(prefix, "mkl") @@ -928,70 +1131,99 @@ def test_install_features(self): def test_clone_offline_simple(self): with make_temp_env("bzip2") as prefix: - assert package_is_installed(prefix, 'bzip2') + assert package_is_installed(prefix, "bzip2") - with make_temp_env('--clone', prefix, '--offline') as clone_prefix: + with make_temp_env("--clone", prefix, "--offline") as clone_prefix: assert context.offline - assert package_is_installed(clone_prefix, 'bzip2') + assert package_is_installed(clone_prefix, "bzip2") def test_conda_config_describe(self): with make_temp_env() as prefix: stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--describe") assert not stderr - skip_categories = ('CLI-only', 'Hidden and Undocumented') - documented_parameter_names = chain.from_iterable(( - parameter_names for category, parameter_names in context.category_map.items() - if category not in skip_categories - )) + skip_categories = ("CLI-only", "Hidden and Undocumented") + documented_parameter_names = chain.from_iterable( + ( + parameter_names + for category, parameter_names in context.category_map.items() + if category not in skip_categories + ) + ) for param_name in documented_parameter_names: - assert re.search(r'^# # %s \(' % param_name, stdout, re.MULTILINE), param_name + assert re.search( + r"^# # %s \(" % param_name, stdout, re.MULTILINE + ), param_name - stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--describe", "--json") + stdout, stderr, _ = run_command( + Commands.CONFIG, prefix, "--describe", "--json" + ) assert not stderr json_obj = json.loads(stdout.strip()) assert len(json_obj) >= 55 - assert 'description' in json_obj[0] + assert "description" in json_obj[0] - with env_var('CONDA_QUIET', 'yes', stack_callback=conda_tests_ctxt_mgmt_def_pol): - stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show-sources") + with env_var( + "CONDA_QUIET", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + stdout, stderr, _ = run_command( + Commands.CONFIG, prefix, "--show-sources" + ) assert not stderr - assert 'envvars' in stdout.strip() + assert "envvars" in stdout.strip() - stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show-sources", "--json") + stdout, stderr, _ = run_command( + Commands.CONFIG, prefix, "--show-sources", "--json" + ) assert not stderr json_obj = json.loads(stdout.strip()) - assert "quiet" in json_obj["envvars"] and json_obj["envvars"]["quiet"] is True + assert ( + "quiet" in json_obj["envvars"] + and json_obj["envvars"]["quiet"] is True + ) assert json_obj["cmd_line"] == {"json": True} run_command(Commands.CONFIG, prefix, "--set", "changeps1", "false") with pytest.raises(CondaError): run_command(Commands.CONFIG, prefix, "--write-default") - rm_rf(join(prefix, 'condarc')) + rm_rf(join(prefix, "condarc")) run_command(Commands.CONFIG, prefix, "--write-default") - with open(join(prefix, 'condarc')) as fh: + with open(join(prefix, "condarc")) as fh: data = fh.read() for param_name in documented_parameter_names: - assert re.search(r'^# %s \(' % 
param_name, data, re.MULTILINE), param_name + assert re.search( + r"^# %s \(" % param_name, data, re.MULTILINE + ), param_name - stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--describe", "--json") + stdout, stderr, _ = run_command( + Commands.CONFIG, prefix, "--describe", "--json" + ) assert not stderr json_obj = json.loads(stdout.strip()) assert len(json_obj) >= 42 - assert 'description' in json_obj[0] + assert "description" in json_obj[0] - with env_var('CONDA_QUIET', 'yes', stack_callback=conda_tests_ctxt_mgmt_def_pol): - stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show-sources") + with env_var( + "CONDA_QUIET", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + stdout, stderr, _ = run_command( + Commands.CONFIG, prefix, "--show-sources" + ) assert not stderr - assert 'envvars' in stdout.strip() + assert "envvars" in stdout.strip() - stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show-sources", "--json") + stdout, stderr, _ = run_command( + Commands.CONFIG, prefix, "--show-sources", "--json" + ) assert not stderr json_obj = json.loads(stdout.strip()) - assert "quiet" in json_obj["envvars"] and json_obj["envvars"]["quiet"] is True + assert ( + "quiet" in json_obj["envvars"] + and json_obj["envvars"]["quiet"] is True + ) assert json_obj["cmd_line"] == {"json": True} def test_conda_config_validate(self): @@ -1002,9 +1234,9 @@ def test_conda_config_validate(self): assert not stderr try: - with open(join(prefix, 'condarc'), 'w') as fh: - fh.write('default_python: anaconda\n') - fh.write('ssl_verify: /path/doesnt/exist\n') + with open(join(prefix, "condarc"), "w") as fh: + fh.write("default_python: anaconda\n") + fh.write("ssl_verify: /path/doesnt/exist\n") reload_config(prefix) with pytest.raises(CondaMultiError) as exc: @@ -1028,35 +1260,50 @@ def test_conda_config_validate(self): reason="Skip unsupported platforms", ) def test_rpy_search(self): - with make_temp_env("python=3.5", "--override-channels", "-c", "defaults") as prefix: - payload, _, _ = run_command(Commands.CONFIG, prefix, "--get", "channels", "--json") + with make_temp_env( + "python=3.5", "--override-channels", "-c", "defaults" + ) as prefix: + payload, _, _ = run_command( + Commands.CONFIG, prefix, "--get", "channels", "--json" + ) default_channels = json_loads(payload)["get"].get("channels", ["defaults"]) - run_command(Commands.CONFIG, prefix, "--add", "channels", "https://repo.anaconda.com/pkgs/free") + run_command( + Commands.CONFIG, + prefix, + "--add", + "channels", + "https://repo.anaconda.com/pkgs/free", + ) # config --append on an empty key pre-populates it with the hardcoded default value! 
for channel in default_channels: run_command(Commands.CONFIG, prefix, "--remove", "channels", channel) stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show", "--json") json_obj = json_loads(stdout) - assert 'defaults' not in json_obj['channels'] + assert "defaults" not in json_obj["channels"] - assert package_is_installed(prefix, 'python') - assert 'r' not in context.channels + assert package_is_installed(prefix, "python") + assert "r" not in context.channels # assert conda search cannot find rpy2 - stdout, stderr, _ = run_command(Commands.SEARCH, prefix, "rpy2", "--json", use_exception_handler=True) - json_obj = json_loads(stdout.replace("Fetching package metadata ...", "").strip()) - assert json_obj['exception_name'] == 'PackagesNotFoundError' + stdout, stderr, _ = run_command( + Commands.SEARCH, prefix, "rpy2", "--json", use_exception_handler=True + ) + json_obj = json_loads( + stdout.replace("Fetching package metadata ...", "").strip() + ) + assert json_obj["exception_name"] == "PackagesNotFoundError" # add r channel run_command(Commands.CONFIG, prefix, "--add", "channels", "r") stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show", "--json") json_obj = json_loads(stdout) - assert 'r' in json_obj['channels'] + assert "r" in json_obj["channels"] # assert conda search can now find rpy2 stdout, stderr, _ = run_command(Commands.SEARCH, prefix, "rpy2", "--json") - json_obj = json_loads(stdout.replace("Fetching package metadata ...", "").strip()) - + json_obj = json_loads( + stdout.replace("Fetching package metadata ...", "").strip() + ) def _test_compile_pyc(self, use_sys_python=False): evs = {} @@ -1073,21 +1320,23 @@ def _test_compile_pyc(self, use_sys_python=False): if use_sys_python: python_binary = sys.executable else: - python_binary = join(prefix, 'python.exe' if on_win else 'bin/python') + python_binary = join( + prefix, "python.exe" if on_win else "bin/python" + ) assert os.path.isfile(python_binary), "Cannot even find Python" if on_win: - site_packages = join('Lib', 'site-packages') + site_packages = join("Lib", "site-packages") else: - site_packages = join('lib', 'python', py_ver) + site_packages = join("lib", "python", py_ver) - test_py_path = join(prefix, site_packages, 'test_compile.py') - test_pyc_path = pyc_path(test_py_path, py_ver).replace('/', os.sep) + test_py_path = join(prefix, site_packages, "test_compile.py") + test_pyc_path = pyc_path(test_py_path, py_ver).replace("/", os.sep) os.makedirs(dirname(test_py_path), exist_ok=True) os.makedirs(dirname(test_pyc_path), exist_ok=True) - with open(test_py_path, 'w') as test_py_file: + with open(test_py_path, "w") as test_py_file: test_py_file.write("__version__ = 1.0") compile_multiple_pyc( @@ -1097,7 +1346,9 @@ def _test_compile_pyc(self, use_sys_python=False): prefix, py_ver, ) - assert isfile(test_pyc_path), f"Failed to generate expected .pyc file {test_pyc_path}" + assert isfile( + test_pyc_path + ), f"Failed to generate expected .pyc file {test_pyc_path}" def test_compile_pyc_sys_python(self): return self._test_compile_pyc(use_sys_python=True) @@ -1106,58 +1357,85 @@ def test_compile_pyc_new_python(self): return self._test_compile_pyc(use_sys_python=False) def test_conda_run_1(self): - with make_temp_env(use_restricted_unicode=False, name=str(uuid4())[:7]) as prefix: - output, error, rc = run_command(Commands.RUN, prefix, 'echo', 'hello') - assert output == 'hello' + os.linesep + with make_temp_env( + use_restricted_unicode=False, name=str(uuid4())[:7] + ) as prefix: + output, error, rc = 
run_command(Commands.RUN, prefix, "echo", "hello") + assert output == "hello" + os.linesep assert not error assert rc == 0 - output, error, rc = run_command(Commands.RUN, prefix, 'exit', '5') + output, error, rc = run_command(Commands.RUN, prefix, "exit", "5") assert not output assert not error assert rc == 5 def test_conda_run_nonexistant_prefix(self): - with make_temp_env(use_restricted_unicode=False, name=str(uuid4())[:7]) as prefix: + with make_temp_env( + use_restricted_unicode=False, name=str(uuid4())[:7] + ) as prefix: prefix = join(prefix, "clearly_a_prefix_that_does_not_exist") with pytest.raises(EnvironmentLocationNotFound): - output, error, rc = run_command(Commands.RUN, prefix, 'echo', 'hello') + output, error, rc = run_command(Commands.RUN, prefix, "echo", "hello") def test_conda_run_prefix_not_a_conda_env(self): with tempdir() as prefix: with pytest.raises(DirectoryNotACondaEnvironmentError): - output, error, rc = run_command(Commands.RUN, prefix, 'echo', 'hello') - + output, error, rc = run_command(Commands.RUN, prefix, "echo", "hello") def test_clone_offline_multichannel_with_untracked(self): - with env_vars({ - "CONDA_DLL_SEARCH_MODIFICATION_ENABLE": "1", - }, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_vars( + { + "CONDA_DLL_SEARCH_MODIFICATION_ENABLE": "1", + }, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): # The flask install will use this version of Python. That is then used to compile flask's pycs. - flask_python = '3.8' # oldest available for osx-arm64 + flask_python = "3.8" # oldest available for osx-arm64 with make_temp_env("python=3.9", use_restricted_unicode=True) as prefix: - payload, _, _ = run_command(Commands.CONFIG, prefix, "--get", "channels", "--json") - default_channels = json_loads(payload)["get"].get("channels", ["defaults"]) - run_command(Commands.CONFIG, prefix, "--add", "channels", "https://repo.anaconda.com/pkgs/main") + payload, _, _ = run_command( + Commands.CONFIG, prefix, "--get", "channels", "--json" + ) + default_channels = json_loads(payload)["get"].get( + "channels", ["defaults"] + ) + run_command( + Commands.CONFIG, + prefix, + "--add", + "channels", + "https://repo.anaconda.com/pkgs/main", + ) # config --append on an empty key pre-populates it with the hardcoded default value! 
                 for channel in default_channels:
-                    run_command(Commands.CONFIG, prefix, "--remove", "channels", channel)
+                    run_command(
+                        Commands.CONFIG, prefix, "--remove", "channels", channel
+                    )
 
-                run_command(Commands.INSTALL, prefix, "-c", "conda-test", "flask", "python=" + flask_python)
+                run_command(
+                    Commands.INSTALL,
+                    prefix,
+                    "-c",
+                    "conda-test",
+                    "flask",
+                    "python=" + flask_python,
+                )
-                touch(join(prefix, 'test.file'))  # untracked file
+                touch(join(prefix, "test.file"))  # untracked file
 
                 with make_temp_env("--clone", prefix, "--offline") as clone_prefix:
                     assert context.offline
                     assert package_is_installed(clone_prefix, "python=" + flask_python)
                     assert package_is_installed(clone_prefix, "flask=0.11.1=py_0")
-                    assert isfile(join(clone_prefix, 'test.file'))  # untracked file
+                    assert isfile(join(clone_prefix, "test.file"))  # untracked file
 
     def test_package_pinning(self):
-        with make_temp_env("python=2.7", "itsdangerous=0.24", "pytz=2017.3", no_capture=True) as prefix:
+        with make_temp_env(
+            "python=2.7", "itsdangerous=0.24", "pytz=2017.3", no_capture=True
+        ) as prefix:
             assert package_is_installed(prefix, "itsdangerous=0.24")
             assert package_is_installed(prefix, "python=2.7")
             assert package_is_installed(prefix, "pytz=2017.3")
 
-            with open(join(prefix, 'conda-meta', 'pinned'), 'w') as fh:
+            with open(join(prefix, "conda-meta", "pinned"), "w") as fh:
                 fh.write("itsdangerous 0.24\n")
 
             run_command(Commands.UPDATE, prefix, "--all", no_capture=True)
@@ -1173,9 +1451,20 @@ def test_package_pinning(self):
 
     def test_update_all_updates_pip_pkg(self):
         with make_temp_env("python=3.6", "pip", "pytz=2018", no_capture=True) as prefix:
-            pip_ioo, pip_ioe, _ = run_command(Commands.CONFIG, prefix, "--set", "pip_interop_enabled", "true")
+            pip_ioo, pip_ioe, _ = run_command(
+                Commands.CONFIG, prefix, "--set", "pip_interop_enabled", "true"
+            )
 
-            pip_o, pip_e, _ = run_command(Commands.RUN, prefix, "--dev", "python", "-m", "pip", "install", "itsdangerous==0.24")
+            pip_o, pip_e, _ = run_command(
+                Commands.RUN,
+                prefix,
+                "--dev",
+                "python",
+                "-m",
+                "pip",
+                "install",
+                "itsdangerous==0.24",
+            )
             PrefixData._cache_.clear()
             stdout, stderr, _ = run_command(Commands.LIST, prefix, "--json")
             assert not stderr
@@ -1199,8 +1488,9 @@ def test_update_all_updates_pip_pkg(self):
 
     def test_package_optional_pinning(self):
         with make_temp_env() as prefix:
-            run_command(Commands.CONFIG, prefix,
-                        "--add", "pinned_packages", "python=3.6.5")
+            run_command(
+                Commands.CONFIG, prefix, "--add", "pinned_packages", "python=3.6.5"
+            )
             run_command(Commands.INSTALL, prefix, "openssl")
             assert not package_is_installed(prefix, "python")
             run_command(Commands.INSTALL, prefix, "flask")
@@ -1208,35 +1498,39 @@ def test_update_deps_flag_absent(self):
         with make_temp_env("python=2", "itsdangerous=0.24") as prefix:
-            assert package_is_installed(prefix, 'python=2')
-            assert package_is_installed(prefix, 'itsdangerous=0.24')
-            assert not package_is_installed(prefix, 'flask')
+            assert package_is_installed(prefix, "python=2")
+            assert package_is_installed(prefix, "itsdangerous=0.24")
+            assert not package_is_installed(prefix, "flask")
 
-            run_command(Commands.INSTALL, prefix, 'flask')
-            assert package_is_installed(prefix, 'python=2')
-            assert package_is_installed(prefix, 'itsdangerous=0.24')
-            assert package_is_installed(prefix, 'flask')
+            run_command(Commands.INSTALL, prefix, "flask")
+            assert package_is_installed(prefix, "python=2")
+            assert package_is_installed(prefix, "itsdangerous=0.24")
+            assert package_is_installed(prefix, "flask")
 
     def test_update_deps_flag_present(self):
         with make_temp_env("python=2", "itsdangerous=0.24") as prefix:
-            assert package_is_installed(prefix, 'python=2')
-            assert package_is_installed(prefix, 'itsdangerous=0.24')
-            assert not package_is_installed(prefix, 'flask')
+            assert package_is_installed(prefix, "python=2")
+            assert package_is_installed(prefix, "itsdangerous=0.24")
+            assert not package_is_installed(prefix, "flask")
 
-            run_command(Commands.INSTALL, prefix, '--update-deps', 'python=2', 'flask')
-            assert package_is_installed(prefix, 'python=2')
-            assert not package_is_installed(prefix, 'itsdangerous=0.24')
-            assert package_is_installed(prefix, 'itsdangerous')
-            assert package_is_installed(prefix, 'flask')
+            run_command(Commands.INSTALL, prefix, "--update-deps", "python=2", "flask")
+            assert package_is_installed(prefix, "python=2")
+            assert not package_is_installed(prefix, "itsdangerous=0.24")
+            assert package_is_installed(prefix, "itsdangerous")
+            assert package_is_installed(prefix, "flask")
 
     @pytest.mark.skipif(True, reason="Add this test back someday.")
     # @pytest.mark.skipif(not on_win, reason="shortcuts only relevant on Windows")
     def test_shortcut_in_underscore_env_shows_message(self):
         prefix = make_temp_prefix("_" + str(uuid4())[:7])
         with make_temp_env(prefix=prefix):
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "console_shortcut")
-            assert ("Environment name starts with underscore '_'. "
-                    "Skipping menu installation." in stderr)
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL, prefix, "console_shortcut"
+            )
+            assert (
+                "Environment name starts with underscore '_'. "
+                "Skipping menu installation." in stderr
+            )
 
     @pytest.mark.skipif(not on_win, reason="shortcuts only relevant on Windows")
     def test_shortcut_not_attempted_with_no_shortcuts_arg(self):
@@ -1244,17 +1538,21 @@ def test_shortcut_not_attempted_with_no_shortcuts_arg(self):
         shortcut_dir = get_shortcut_dir()
         shortcut_file = join(shortcut_dir, f"Anaconda Prompt ({basename(prefix)}).lnk")
         with make_temp_env(prefix=prefix):
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "console_shortcut",
-                                            "--no-shortcuts")
-            assert ("Environment name starts with underscore '_'. Skipping menu installation."
-                    not in stderr)
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL, prefix, "console_shortcut", "--no-shortcuts"
+            )
+            assert (
+                "Environment name starts with underscore '_'. Skipping menu installation."
+                not in stderr
+            )
             assert not isfile(shortcut_file)
 
     @pytest.mark.skipif(not on_win, reason="shortcuts only relevant on Windows")
     def test_shortcut_creation_installs_shortcut(self):
         shortcut_dir = get_shortcut_dir()
         shortcut_dir = join(
-            shortcut_dir, "Anaconda{} ({}-bit)" "".format(sys.version_info.major, context.bits)
+            shortcut_dir,
+            "Anaconda{} ({}-bit)" "".format(sys.version_info.major, context.bits),
         )
 
         prefix = make_temp_prefix(str(uuid4())[:7])
@@ -1269,8 +1567,8 @@ def test_shortcut_creation_installs_shortcut(self):
             )
 
             # make sure that cleanup without specifying --shortcuts still removes shortcuts
-            run_command(Commands.REMOVE, prefix, 'console_shortcut')
-            assert not package_is_installed(prefix, 'console_shortcut')
+            run_command(Commands.REMOVE, prefix, "console_shortcut")
+            assert not package_is_installed(prefix, "console_shortcut")
             assert not isfile(shortcut_file)
         finally:
             rmtree(prefix, ignore_errors=True)
@@ -1281,7 +1579,8 @@ def test_shortcut_creation_installs_shortcut(self):
     def test_shortcut_absent_does_not_barf_on_uninstall(self):
         shortcut_dir = get_shortcut_dir()
         shortcut_dir = join(
-            shortcut_dir, "Anaconda{} ({}-bit)" "".format(sys.version_info.major, context.bits)
+            shortcut_dir,
+            "Anaconda{} ({}-bit)" "".format(sys.version_info.major, context.bits),
         )
 
         prefix = make_temp_prefix(str(uuid4())[:7])
@@ -1291,12 +1590,12 @@ def test_shortcut_absent_does_not_barf_on_uninstall(self):
         try:
             # including --no-shortcuts should not get shortcuts installed
             with make_temp_env("console_shortcut", "--no-shortcuts", prefix=prefix):
-                assert package_is_installed(prefix, 'console_shortcut')
+                assert package_is_installed(prefix, "console_shortcut")
                 assert not isfile(shortcut_file)
 
                 # make sure that cleanup without specifying --shortcuts still removes shortcuts
-                run_command(Commands.REMOVE, prefix, 'console_shortcut')
-                assert not package_is_installed(prefix, 'console_shortcut')
+                run_command(Commands.REMOVE, prefix, "console_shortcut")
+                assert not package_is_installed(prefix, "console_shortcut")
                 assert not isfile(shortcut_file)
         finally:
             rmtree(prefix, ignore_errors=True)
@@ -1307,7 +1606,8 @@ def test_shortcut_absent_does_not_barf_on_uninstall(self):
     def test_shortcut_absent_when_condarc_set(self):
         shortcut_dir = get_shortcut_dir()
         shortcut_dir = join(
-            shortcut_dir, "Anaconda{} ({}-bit)" "".format(sys.version_info.major, context.bits)
+            shortcut_dir,
+            "Anaconda{} ({}-bit)" "".format(sys.version_info.major, context.bits),
         )
 
         prefix = make_temp_prefix(str(uuid4())[:7])
@@ -1318,18 +1618,18 @@ def test_shortcut_absent_when_condarc_set(self):
         run_command(Commands.CONFIG, prefix, "--set", "shortcuts", "false")
         stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--get", "--json")
         json_obj = json_loads(stdout)
-        assert json_obj['rc_path'] == join(prefix, 'condarc')
-        assert json_obj['get']['shortcuts'] is False
+        assert json_obj["rc_path"] == join(prefix, "condarc")
+        assert json_obj["get"]["shortcuts"] is False
 
         try:
             with make_temp_env("console_shortcut", prefix=prefix):
                 # including shortcuts: False from condarc should not get shortcuts installed
-                assert package_is_installed(prefix, 'console_shortcut')
+                assert package_is_installed(prefix, "console_shortcut")
                 assert not isfile(shortcut_file)
 
                 # make sure that cleanup without specifying --shortcuts still removes shortcuts
-                run_command(Commands.REMOVE, prefix, 'console_shortcut')
-                assert not package_is_installed(prefix, 'console_shortcut')
+                run_command(Commands.REMOVE, prefix, "console_shortcut")
+                assert not package_is_installed(prefix, "console_shortcut")
                 assert not isfile(shortcut_file)
         finally:
             rmtree(prefix, ignore_errors=True)
@@ -1342,20 +1642,24 @@ def test_create_default_packages(self):
             prefix = make_temp_prefix(str(uuid4())[:7])
 
             # set packages
-            run_command(Commands.CONFIG, prefix, "--add", "create_default_packages", "pip")
-            run_command(Commands.CONFIG, prefix, "--add", "create_default_packages", "flask")
+            run_command(
+                Commands.CONFIG, prefix, "--add", "create_default_packages", "pip"
+            )
+            run_command(
+                Commands.CONFIG, prefix, "--add", "create_default_packages", "flask"
+            )
             stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show")
             yml_obj = yaml_round_trip_load(stdout)
-            assert yml_obj['create_default_packages'] == ['flask', 'pip']
+            assert yml_obj["create_default_packages"] == ["flask", "pip"]
 
-            assert not package_is_installed(prefix, 'python=2')
-            assert not package_is_installed(prefix, 'pytz')
-            assert not package_is_installed(prefix, 'flask')
+            assert not package_is_installed(prefix, "python=2")
+            assert not package_is_installed(prefix, "pytz")
+            assert not package_is_installed(prefix, "flask")
 
             with make_temp_env("python=2", "pytz", prefix=prefix):
-                assert package_is_installed(prefix, 'python=2')
-                assert package_is_installed(prefix, 'pytz')
-                assert package_is_installed(prefix, 'flask')
+                assert package_is_installed(prefix, "python=2")
+                assert package_is_installed(prefix, "pytz")
+                assert package_is_installed(prefix, "flask")
         finally:
             rmtree(prefix, ignore_errors=True)
@@ -1365,47 +1669,64 @@ def test_create_default_packages_no_default_packages(self):
             prefix = make_temp_prefix(str(uuid4())[:7])
 
             # set packages
-            run_command(Commands.CONFIG, prefix, "--add", "create_default_packages", "pip")
-            run_command(Commands.CONFIG, prefix, "--add", "create_default_packages", "flask")
+            run_command(
+                Commands.CONFIG, prefix, "--add", "create_default_packages", "pip"
+            )
+            run_command(
+                Commands.CONFIG, prefix, "--add", "create_default_packages", "flask"
+            )
             stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show")
             yml_obj = yaml_round_trip_load(stdout)
-            assert yml_obj['create_default_packages'] == ['flask', 'pip']
+            assert yml_obj["create_default_packages"] == ["flask", "pip"]
 
-            assert not package_is_installed(prefix, 'python=2')
-            assert not package_is_installed(prefix, 'pytz')
-            assert not package_is_installed(prefix, 'flask')
+            assert not package_is_installed(prefix, "python=2")
+            assert not package_is_installed(prefix, "pytz")
+            assert not package_is_installed(prefix, "flask")
 
-            with make_temp_env("python=2", "pytz", "--no-default-packages", prefix=prefix):
-                assert package_is_installed(prefix, 'python=2')
-                assert package_is_installed(prefix, 'pytz')
-                assert not package_is_installed(prefix, 'flask')
+            with make_temp_env(
+                "python=2", "pytz", "--no-default-packages", prefix=prefix
+            ):
+                assert package_is_installed(prefix, "python=2")
+                assert package_is_installed(prefix, "pytz")
+                assert not package_is_installed(prefix, "flask")
         finally:
             rmtree(prefix, ignore_errors=True)
 
     def test_create_dry_run(self):
         # Regression test for #3453
-        prefix = '/some/place'
+        prefix = "/some/place"
         with pytest.raises(DryRunExit):
             run_command(Commands.CREATE, prefix, "--dry-run")
-        output, _, _ = run_command(Commands.CREATE, prefix, "--dry-run", use_exception_handler=True)
-        assert join('some', 'place') in output
+        output, _, _ = run_command(
+            Commands.CREATE, prefix, "--dry-run", use_exception_handler=True
+        )
+        assert join("some", "place") in output
         # TODO: This assert passes locally but fails on CI boxes; figure out why and re-enable
         # assert "The following empty environments will be CREATED" in stdout
 
-        prefix = '/another/place'
+        prefix = "/another/place"
         with pytest.raises(DryRunExit):
             run_command(Commands.CREATE, prefix, "flask", "--dry-run")
-        output, _, _ = run_command(Commands.CREATE, prefix, "flask", "--dry-run", use_exception_handler=True)
+        output, _, _ = run_command(
+            Commands.CREATE, prefix, "flask", "--dry-run", use_exception_handler=True
+        )
         assert ":flask" in output
         assert ":python" in output
-        assert join('another', 'place') in output
+        assert join("another", "place") in output
 
     def test_create_dry_run_json(self):
-        prefix = '/some/place'
+        prefix = "/some/place"
        with pytest.raises(DryRunExit):
             run_command(Commands.CREATE, prefix, "flask", "--dry-run", "--json")
-        output, _, _ = run_command(Commands.CREATE, prefix, "flask", "--dry-run", "--json", use_exception_handler=True)
+        output, _, _ = run_command(
+            Commands.CREATE,
+            prefix,
+            "flask",
+            "--dry-run",
+            "--json",
+            use_exception_handler=True,
+        )
         loaded = json.loads(output)
         names = {d["name"] for d in loaded["actions"]["LINK"]}
         assert "python" in names
@@ -1423,38 +1744,68 @@ def test_packages_not_found(self):
                 run_command(Commands.INSTALL, prefix, "not-a-real-package")
             assert "not-a-real-package" in str(exc.value)
 
-            _, error, _ = run_command(Commands.INSTALL, prefix, "not-a-real-package",
-                                      use_exception_handler=True)
+            _, error, _ = run_command(
+                Commands.INSTALL,
+                prefix,
+                "not-a-real-package",
+                use_exception_handler=True,
+            )
             assert "not-a-real-package" in error
 
     def test_conda_pip_interop_dependency_satisfied_by_pip(self):
-        with make_temp_env("python=3.10", "pip", use_restricted_unicode=False) as prefix:
+        with make_temp_env(
+            "python=3.10", "pip", use_restricted_unicode=False
+        ) as prefix:
             run_command(Commands.CONFIG, prefix, "--set", "pip_interop_enabled", "true")
-            run_command(Commands.RUN, prefix, "--dev", "python", "-m", "pip", "install", "itsdangerous")
+            run_command(
+                Commands.RUN,
+                prefix,
+                "--dev",
+                "python",
+                "-m",
+                "pip",
+                "install",
+                "itsdangerous",
+            )
             PrefixData._cache_.clear()
             output, error, _ = run_command(Commands.LIST, prefix)
-            assert 'itsdangerous' in output
+            assert "itsdangerous" in output
             assert not error
 
-            output, _, _ = run_command(Commands.INSTALL, prefix, 'flask', '--dry-run', '--json',
-                                       use_exception_handler=True)
+            output, _, _ = run_command(
+                Commands.INSTALL,
+                prefix,
+                "flask",
+                "--dry-run",
+                "--json",
+                use_exception_handler=True,
+            )
             json_obj = json.loads(output)
             print(json_obj)
             # itsdangerous shouldn't be in this list, because it's already present and satisfied
             # by the pip package
             assert any(rec["name"] == "flask" for rec in json_obj["actions"]["LINK"])
-            assert not any(rec["name"] == "itsdangerous" for rec in json_obj["actions"]["LINK"])
+            assert not any(
+                rec["name"] == "itsdangerous" for rec in json_obj["actions"]["LINK"]
+            )
 
-            output, error, _ = run_command(Commands.SEARCH, prefix, "not-a-real-package", "--json",
-                                           use_exception_handler=True)
+            output, error, _ = run_command(
+                Commands.SEARCH,
+                prefix,
+                "not-a-real-package",
+                "--json",
+                use_exception_handler=True,
+            )
             assert not error
             json_obj = json_loads(output.strip())
-            assert json_obj['exception_name'] == 'PackagesNotFoundError'
+            assert json_obj["exception_name"] == "PackagesNotFoundError"
             assert not len(json_obj.keys()) == 0
 
     # XXX this test fails for osx-arm64 or other platforms absent from old 'free' channel
-    @pytest.mark.skipif(context.subdir == "win-32", reason="metadata is wrong; give python2.7")
+    @pytest.mark.skipif(
+        context.subdir == "win-32", reason="metadata is wrong; give python2.7"
+    )
     def test_conda_pip_interop_pip_clobbers_conda(self):
         # 1. conda install old six
         # 2. pip install -U six
@@ -1464,8 +1815,14 @@ def test_conda_pip_interop_pip_clobbers_conda(self):
         #     File "C:\Users\builder\AppData\Local\Temp\f903_固ō한ñђáγßê家ôç_35\lib\site-packages\pip\_vendor\urllib3\util\ssl_.py", line 313, in ssl_wrap_socket
         #       context.load_verify_locations(ca_certs, ca_cert_dir)
         #   TypeError: cafile should be a valid filesystem path
-        with make_temp_env("-c", "https://repo.anaconda.com/pkgs/free", "six=1.9", "pip=9.0.3", "python=3.5",
-                           use_restricted_unicode=on_win) as prefix:
+        with make_temp_env(
+            "-c",
+            "https://repo.anaconda.com/pkgs/free",
+            "six=1.9",
+            "pip=9.0.3",
+            "python=3.5",
+            use_restricted_unicode=on_win,
+        ) as prefix:
             run_command(Commands.CONFIG, prefix, "--set", "pip_interop_enabled", "true")
             assert package_is_installed(prefix, "six=1.9.0")
             assert package_is_installed(prefix, "python=3.5")
@@ -1473,20 +1830,36 @@ def test_conda_pip_interop_pip_clobbers_conda(self):
             # On Windows, it's more than prefix.lower(), we get differently shortened paths too.
             # If only we could use pathlib.
             if not on_win:
-                output, _, _ = run_command(Commands.RUN, prefix, which_or_where, "python")
-                assert prefix.lower() in output.lower(), \
-                    "We should be running python in {}\n" \
-                    "We are running {}\n" \
-                    "Please check the CONDA_PREFIX PATH promotion in tests/__init__.py\n" \
-                    "for a likely place to add more fixes".format(prefix, output)
-            output, _, _ = run_command(Commands.RUN, prefix, "python", "-m", "pip", "freeze")
-            pkgs = {ensure_text_type(v.strip()) for v in output.splitlines() if v.strip()}
+                output, _, _ = run_command(
+                    Commands.RUN, prefix, which_or_where, "python"
+                )
+                assert prefix.lower() in output.lower(), (
+                    "We should be running python in {}\n"
+                    "We are running {}\n"
+                    "Please check the CONDA_PREFIX PATH promotion in tests/__init__.py\n"
+                    "for a likely place to add more fixes".format(prefix, output)
+                )
+            output, _, _ = run_command(
+                Commands.RUN, prefix, "python", "-m", "pip", "freeze"
+            )
+            pkgs = {
+                ensure_text_type(v.strip()) for v in output.splitlines() if v.strip()
+            }
             assert "six==1.9.0" in pkgs
             py_ver = get_python_version_for_prefix(prefix)
             sp_dir = get_python_site_packages_short_path(py_ver)
 
-            output, _, _ = run_command(Commands.RUN, prefix, "python", "-m", "pip", "install", "-U", "six==1.10")
+            output, _, _ = run_command(
+                Commands.RUN,
+                prefix,
+                "python",
+                "-m",
+                "pip",
+                "install",
+                "-U",
+                "six==1.10",
+            )
             assert "Successfully installed six-1.10.0" in ensure_text_type(output)
             PrefixData._cache_.clear()
             stdout, stderr, _ = run_command(Commands.LIST, prefix, "--json")
@@ -1504,8 +1877,12 @@ def test_conda_pip_interop_pip_clobbers_conda(self):
                 "version": "1.10.0",
             }
             assert package_is_installed(prefix, "six=1.10.0")
-            output, err, _ = run_command(Commands.RUN, prefix, "python", "-m", "pip", "freeze")
-            pkgs = {ensure_text_type(v.strip()) for v in output.splitlines() if v.strip()}
+            output, err, _ = run_command(
+                Commands.RUN, prefix, "python", "-m", "pip", "freeze"
+            )
+            pkgs = {
+                ensure_text_type(v.strip()) for v in output.splitlines() if v.strip()
+            }
             assert "six==1.10.0" in pkgs
 
             six_record = next(PrefixData(prefix).query("six"))
@@ -1515,9 +1892,7 @@ def test_conda_pip_interop_pip_clobbers_conda(self):
                 "build_number": 0,
                 "channel": "https://conda.anaconda.org/pypi",
                 "constrains": [],
-                "depends": [
-                    "python 3.5.*"
-                ],
+                "depends": ["python 3.5.*"],
                 "files": [
                     sp_dir + "/" + "__pycache__/six.cpython-35.pyc",
                     sp_dir + "/" + "six-1.10.0.dist-info/DESCRIPTION.rst",
@@ -1538,79 +1913,101 @@ def test_conda_pip_interop_pip_clobbers_conda(self):
                             "_path": sp_dir + "/" + "__pycache__/six.cpython-35.pyc",
                             "path_type": "hardlink",
                             "sha256": None,
-                            "size_in_bytes": None
+                            "size_in_bytes": None,
                         },
                         {
-                            "_path": sp_dir + "/" + "six-1.10.0.dist-info/DESCRIPTION.rst",
+                            "_path": sp_dir
+                            + "/"
+                            + "six-1.10.0.dist-info/DESCRIPTION.rst",
                             "path_type": "hardlink",
                             "sha256": "QWBtSTT2zzabwJv1NQbTfClSX13m-Qc6tqU4TRL1RLs",
-                            "size_in_bytes": 774
+                            "size_in_bytes": 774,
                         },
                         {
                             "_path": sp_dir + "/" + "six-1.10.0.dist-info/INSTALLER",
                             "path_type": "hardlink",
                             "sha256": "zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg",
-                            "size_in_bytes": 4
+                            "size_in_bytes": 4,
                         },
                         {
                             "_path": sp_dir + "/" + "six-1.10.0.dist-info/METADATA",
                             "path_type": "hardlink",
                             "sha256": "5HceJsUnHof2IRamlCKO2MwNjve1eSP4rLzVQDfwpCQ",
-                            "size_in_bytes": 1283
+                            "size_in_bytes": 1283,
                         },
                         {
                             "_path": sp_dir + "/" + "six-1.10.0.dist-info/RECORD",
                             "path_type": "hardlink",
                             "sha256": None,
-                            "size_in_bytes": None
+                            "size_in_bytes": None,
                         },
                         {
                             "_path": sp_dir + "/" + "six-1.10.0.dist-info/WHEEL",
                             "path_type": "hardlink",
                             "sha256": "GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE",
-                            "size_in_bytes": 110
+                            "size_in_bytes": 110,
                         },
                         {
-                            "_path": sp_dir + "/" + "six-1.10.0.dist-info/metadata.json",
+                            "_path": sp_dir
+                            + "/"
+                            + "six-1.10.0.dist-info/metadata.json",
                             "path_type": "hardlink",
                             "sha256": "jtOeeTBubYDChl_5Ql5ZPlKoHgg6rdqRIjOz1e5Ek2U",
-                            "size_in_bytes": 658
+                            "size_in_bytes": 658,
                         },
                         {
-                            "_path": sp_dir + "/" + "six-1.10.0.dist-info/top_level.txt",
+                            "_path": sp_dir
+                            + "/"
+                            + "six-1.10.0.dist-info/top_level.txt",
                             "path_type": "hardlink",
                             "sha256": "_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais",
-                            "size_in_bytes": 4
+                            "size_in_bytes": 4,
                         },
                         {
                             "_path": sp_dir + "/" + "six.py",
                             "path_type": "hardlink",
                             "sha256": "A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas",
-                            "size_in_bytes": 30098
-                        }
+                            "size_in_bytes": 30098,
+                        },
                     ],
-                    "paths_version": 1
+                    "paths_version": 1,
                 },
                 "subdir": "pypi",
-                "version": "1.10.0"
+                "version": "1.10.0",
             }
 
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "six", "--satisfied-skip-solve")
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL, prefix, "six", "--satisfied-skip-solve"
+            )
             assert not stderr
             assert "All requested packages already installed." in stdout
 
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "six", "--repodata-fn",
-                                            "repodata.json")
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL, prefix, "six", "--repodata-fn", "repodata.json"
+            )
             assert not stderr
             assert package_is_installed(prefix, "six>=1.11")
-            output, err, _ = run_command(Commands.RUN, prefix, "python", "-m", "pip", "freeze")
-            pkgs = {ensure_text_type(v.strip()) for v in output.splitlines() if v.strip()}
+            output, err, _ = run_command(
+                Commands.RUN, prefix, "python", "-m", "pip", "freeze"
+            )
+            pkgs = {
+                ensure_text_type(v.strip()) for v in output.splitlines() if v.strip()
+            }
             six_record = next(PrefixData(prefix).query("six"))
             assert "six==%s" % six_record.version in pkgs
             assert len(glob(join(prefix, "conda-meta", "six-*.json"))) == 1
 
-            output, err, _ = run_command(Commands.RUN, prefix, "python", "-m", "pip", "install", "-U", "six==1.10")
+            output, err, _ = run_command(
+                Commands.RUN,
+                prefix,
+                "python",
+                "-m",
+                "pip",
+                "install",
+                "-U",
+                "six==1.10",
+            )
             print(output)
             assert "Successfully installed six-1.10.0" in ensure_text_type(output)
             PrefixData._cache_.clear()
@@ -1641,13 +2038,24 @@ def test_conda_pip_interop_conda_editable_package(self):
             ) as prefix:
                 workdir = prefix
-            run_command(Commands.CONFIG, prefix, "--set", "pip_interop_enabled", "true")
+            run_command(
+                Commands.CONFIG, prefix, "--set", "pip_interop_enabled", "true"
+            )
             assert package_is_installed(prefix, "python")
 
             # install an "editable" urllib3 that cannot be managed
-            output, err, _ = run_command(Commands.RUN, prefix, '--cwd', workdir,
-                                         "python", "-m", "pip", "install", "-e",
-                                         "git+https://github.com/urllib3/urllib3.git@1.19.1#egg=urllib3")
+            output, err, _ = run_command(
+                Commands.RUN,
+                prefix,
+                "--cwd",
+                workdir,
+                "python",
+                "-m",
+                "pip",
+                "install",
+                "-e",
+                "git+https://github.com/urllib3/urllib3.git@1.19.1#egg=urllib3",
+            )
             assert isfile(join(workdir, "src", "urllib3", "urllib3", "__init__.py"))
             assert not isfile(join("src", "urllib3", "urllib3", "__init__.py"))
             PrefixData._cache_.clear()
@@ -1666,22 +2074,25 @@ def test_conda_pip_interop_conda_editable_package(self):
                     "cryptography >=1.3.4",
                     "idna >=2.0.0",
                     "pyopenssl >=0.14",
-                    "pysocks !=1.5.7,<2.0,>=1.5.6"
-                ],
-                "depends": [
-                    "python 2.7.*"
+                    "pysocks !=1.5.7,<2.0,>=1.5.6",
                 ],
+                "depends": ["python 2.7.*"],
                 "fn": "urllib3-1.19.1-dev_0",
                 "name": "urllib3",
                 "package_type": "virtual_python_egg_link",
                 "subdir": "pypi",
-                "version": "1.19.1"
+                "version": "1.19.1",
             }
 
             # the unmanageable urllib3 should prevent a new requests from being installed
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix,
-                                            "requests", "--dry-run", "--json",
-                                            use_exception_handler=True)
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL,
+                prefix,
+                "requests",
+                "--dry-run",
+                "--json",
+                use_exception_handler=True,
+            )
             assert not stderr
             json_obj = json_loads(stdout)
             assert "UNLINK" not in json_obj["actions"]
@@ -1691,7 +2102,9 @@ def test_conda_pip_interop_conda_editable_package(self):
             assert VersionOrder(link_dists[0]["version"]) < VersionOrder("2.16")
 
             # should already be satisfied
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "urllib3", "-S")
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL, prefix, "urllib3", "-S"
+            )
             assert "All requested packages already installed." in stdout
 
             # should raise an error
@@ -1699,11 +2112,16 @@ def test_conda_pip_interop_conda_editable_package(self):
            # TODO: This raises PackagesNotFoundError, but the error should really explain
            # that we can't install urllib3 because it's already installed and
            # unmanageable. The error should suggest trying to use pip to uninstall it.
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "urllib3=1.20", "--dry-run")
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL, prefix, "urllib3=1.20", "--dry-run"
+            )
 
             # Now install a manageable urllib3.
-            output = check_output(PYTHON_BINARY + " -m pip install -U urllib3==1.20",
-                                  cwd=prefix, shell=True)
+            output = check_output(
+                PYTHON_BINARY + " -m pip install -U urllib3==1.20",
+                cwd=prefix,
+                shell=True,
+            )
             print(output)
             PrefixData._cache_.clear()
             assert package_is_installed(prefix, "urllib3")
@@ -1717,56 +2135,74 @@ def test_conda_pip_interop_conda_editable_package(self):
                 "build": "pypi_0",
                 "build_number": 0,
                 "channel": "https://conda.anaconda.org/pypi",
-                "constrains": [
-                    "pysocks >=1.5.6,<2.0,!=1.5.7"
-                ],
-                "depends": [
-                    "python 2.7.*"
-                ],
+                "constrains": ["pysocks >=1.5.6,<2.0,!=1.5.7"],
+                "depends": ["python 2.7.*"],
                 "fn": "urllib3-1.20.dist-info",
                 "name": "urllib3",
                 "package_type": "virtual_python_wheel",
                 "subdir": "pypi",
-                "version": "1.20"
+                "version": "1.20",
             }
 
             # we should be able to install an unbundled requests that upgrades urllib3 in the process
-            stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "requests=2.18", "--json")
+            stdout, stderr, _ = run_command(
+                Commands.INSTALL, prefix, "requests=2.18", "--json"
+            )
             assert package_is_installed(prefix, "requests")
             assert package_is_installed(prefix, "urllib3>=1.21")
             assert not stderr
             json_obj = json_loads(stdout)
             unlink_dists = [
-                dist_obj for dist_obj in json_obj["actions"]["UNLINK"] if dist_obj.get("platform") == "pypi"
+                dist_obj
+                for dist_obj in json_obj["actions"]["UNLINK"]
+                if dist_obj.get("platform") == "pypi"
             ]  # filter out conda package upgrades like python and libffi
             assert len(unlink_dists) == 1
             assert unlink_dists[0]["name"] == "urllib3"
             assert unlink_dists[0]["channel"] == "pypi"
 
-
     def test_conda_pip_interop_compatible_release_operator(self):
         # Regression test for #7776
         # important to start the env with six 1.9. That version forces an upgrade later in the test
-        with make_temp_env("-c", "https://repo.anaconda.com/pkgs/free", "pip=10", "six=1.9", "appdirs",
-                           use_restricted_unicode=on_win) as prefix:
+        with make_temp_env(
+            "-c",
+            "https://repo.anaconda.com/pkgs/free",
+            "pip=10",
+            "six=1.9",
+            "appdirs",
+            use_restricted_unicode=on_win,
+        ) as prefix:
             run_command(Commands.CONFIG, prefix, "--set", "pip_interop_enabled", "true")
             assert package_is_installed(prefix, "python")
             assert package_is_installed(prefix, "six=1.9")
             assert package_is_installed(prefix, "appdirs>=1.4.3")
 
             python_binary = join(prefix, PYTHON_BINARY)
-            p = Popen([python_binary, '-m', 'pip', 'install', 'fs==2.1.0'],
-                      stdout=PIPE, stderr=PIPE, cwd=prefix, shell=False)
+            p = Popen(
+                [python_binary, "-m", "pip", "install", "fs==2.1.0"],
+                stdout=PIPE,
+                stderr=PIPE,
+                cwd=prefix,
+                shell=False,
+            )
             stdout, stderr = p.communicate()
             rc = p.returncode
             assert int(rc) != 0
-            stderr = stderr.decode('utf-8', errors='replace') if hasattr(stderr, 'decode') else str(stderr)
+            stderr = (
+                stderr.decode("utf-8", errors="replace")
+                if hasattr(stderr, "decode")
+                else str(stderr)
+            )
             assert "Cannot uninstall" in stderr
 
             run_command(Commands.REMOVE, prefix, "six")
             assert not package_is_installed(prefix, "six")
 
-            output = check_output([python_binary, '-m', 'pip', 'install', 'fs==2.1.0'], cwd=prefix, shell=False)
+            output = check_output(
+                [python_binary, "-m", "pip", "install", "fs==2.1.0"],
+                cwd=prefix,
+                shell=False,
+            )
             print(output)
             PrefixData._cache_.clear()
             assert package_is_installed(prefix, "fs==2.1.0")
@@ -1776,29 +2212,41 @@ def test_conda_pip_interop_compatible_release_operator(self):
             stdout, stderr, _ = run_command(Commands.LIST, prefix)
             assert not stderr
-            assert "fs                        2.1.0                    pypi_0    pypi" in stdout
+            assert (
+                "fs                        2.1.0                    pypi_0    pypi"
+                in stdout
+            )
 
             with pytest.raises(DryRunExit):
-                run_command(Commands.INSTALL, prefix, "-c", "https://repo.anaconda.com/pkgs/free",
-                            "agate=1.6", "--dry-run")
+                run_command(
+                    Commands.INSTALL,
+                    prefix,
+                    "-c",
+                    "https://repo.anaconda.com/pkgs/free",
+                    "agate=1.6",
+                    "--dry-run",
+                )
 
     def test_install_freezes_env_by_default(self):
         """We pass --no-update-deps/--freeze-installed by default, effectively. This helps speed things
-        up by not considering changes to existing stuff unless the solve ends up unsatisfiable."""
+        up by not considering changes to existing stuff unless the solve ends up unsatisfiable.
+        """
         # create an initial env
-        with make_temp_env("python=2", use_restricted_unicode=on_win, no_capture=True) as prefix:
+        with make_temp_env(
+            "python=2", use_restricted_unicode=on_win, no_capture=True
+        ) as prefix:
             assert package_is_installed(prefix, "python=2.7.*")
 
             # Install a version older than the last one
             run_command(Commands.INSTALL, prefix, "setuptools=40.*")
 
-            stdout, stderr, _ = run_command(Commands.LIST, prefix, '--json')
+            stdout, stderr, _ = run_command(Commands.LIST, prefix, "--json")
             pkgs = json.loads(stdout)
 
             run_command(Commands.INSTALL, prefix, "imagesize", "--freeze-installed")
 
-            stdout, _, _ = run_command(Commands.LIST, prefix, '--json')
+            stdout, _, _ = run_command(Commands.LIST, prefix, "--json")
             pkgs_after_install = json.loads(stdout)
 
             # Compare before and after installing package
@@ -1811,36 +2259,60 @@ def test_install_freezes_env_by_default(self):
     def test_search_gawk_not_win_filter(self):
         with make_temp_env() as prefix:
             stdout, stderr, _ = run_command(
-                Commands.SEARCH, prefix, "*gawk", "--platform", "win-64", "--json",
-                "-c", "https://repo.anaconda.com/pkgs/msys2", "--json",
+                Commands.SEARCH,
+                prefix,
+                "*gawk",
+                "--platform",
+                "win-64",
+                "--json",
+                "-c",
+                "https://repo.anaconda.com/pkgs/msys2",
+                "--json",
                 use_exception_handler=True,
             )
-            json_obj = json_loads(stdout.replace("Fetching package metadata ...", "").strip())
+            json_obj = json_loads(
+                stdout.replace("Fetching package metadata ...", "").strip()
+            )
             assert "m2-gawk" in json_obj.keys()
             assert len(json_obj.keys()) == 1
 
     @pytest.mark.skipif(not on_win, reason="gawk is a windows only package")
     def test_search_gawk_on_win(self):
         with make_temp_env() as prefix:
-            stdout, _, _ = run_command(Commands.SEARCH, prefix, "*gawk", "--json", use_exception_handler=True)
-            json_obj = json_loads(stdout.replace("Fetching package metadata ...", "").strip())
+            stdout, _, _ = run_command(
+                Commands.SEARCH, prefix, "*gawk", "--json", use_exception_handler=True
+            )
+            json_obj = json_loads(
+                stdout.replace("Fetching package metadata ...", "").strip()
+            )
             assert "m2-gawk" in json_obj.keys()
             assert len(json_obj.keys()) == 1
 
     @pytest.mark.skipif(not on_win, reason="gawk is a windows only package")
     def test_search_gawk_on_win_filter(self):
         with make_temp_env() as prefix:
-            stdout, _, _ = run_command(Commands.SEARCH, prefix, "gawk", "--platform",
-                                       "linux-64", "--json", use_exception_handler=True)
-            json_obj = json_loads(stdout.replace("Fetching package metadata ...", "").strip())
+            stdout, _, _ = run_command(
+                Commands.SEARCH,
+                prefix,
+                "gawk",
+                "--platform",
+                "linux-64",
+                "--json",
+                use_exception_handler=True,
+            )
+            json_obj = json_loads(
+                stdout.replace("Fetching package metadata ...", "").strip()
+            )
             assert not len(json_obj.keys()) == 0
 
     def test_bad_anaconda_token_infinite_loop(self):
         # This test is being changed around 2017-10-17, when the behavior of anaconda.org
         # was changed.  Previously, an expired token would return with a 401 response.
         # Now, a 200 response is always given, with any public packages available on the channel.
-        response = requests.get("https://conda.anaconda.org/t/cqgccfm1mfma/data-portal/"
-                                "%s/repodata.json" % context.subdir)
+        response = requests.get(
+            "https://conda.anaconda.org/t/cqgccfm1mfma/data-portal/"
+            "%s/repodata.json" % context.subdir
+        )
         assert response.status_code == 200
 
         try:
@@ -1865,7 +2337,9 @@ def test_bad_anaconda_token_infinite_loop(self):
                 "--json",
             )
 
-            stdout, stderr, _ = run_command(Commands.SEARCH, prefix, "anaconda-mosaic", "--json")
+            stdout, stderr, _ = run_command(
+                Commands.SEARCH, prefix, "anaconda-mosaic", "--json"
+            )
             json_obj = json.loads(stdout)
             assert "anaconda-mosaic" in json_obj
@@ -1888,7 +2362,9 @@ def test_anaconda_token_with_private_package(self):
         try:
             prefix = make_temp_prefix(str(uuid4())[:7])
             channel_url = "https://conda.anaconda.org/kalefranz"
-            payload, _, _ = run_command(Commands.CONFIG, prefix, "--get", "channels", "--json")
+            payload, _, _ = run_command(
+                Commands.CONFIG, prefix, "--get", "channels", "--json"
+            )
             default_channels = json_loads(payload)["get"].get("channels", ["defaults"])
             run_command(Commands.CONFIG, prefix, "--append", "channels", channel_url)
             # config --append on an empty key pre-populates it with the hardcoded default value!
@@ -1897,7 +2373,7 @@ def test_anaconda_token_with_private_package(self):
             output, _, _ = run_command(Commands.CONFIG, prefix, "--show")
             print(output)
             yml_obj = yaml_round_trip_load(output)
-            assert yml_obj['channels'] == [channel_url]
+            assert yml_obj["channels"] == [channel_url]
 
             output, _, _ = run_command(
                 Commands.SEARCH,
@@ -1909,7 +2385,7 @@ def test_anaconda_token_with_private_package(self):
                 use_exception_handler=True,
             )
             json_obj = json_loads(output)
-            assert json_obj['exception_name'] == 'PackagesNotFoundError'
+            assert json_obj["exception_name"] == "PackagesNotFoundError"
         finally:
             rmtree(prefix, ignore_errors=True)
@@ -1919,7 +2395,9 @@ def test_anaconda_token_with_private_package(self):
         try:
             prefix = make_temp_prefix(str(uuid4())[:7])
             channel_url = "https://conda.anaconda.org/t/zlZvSlMGN7CB/kalefranz"
-            payload, _, _ = run_command(Commands.CONFIG, prefix, "--get", "channels", "--json")
+            payload, _, _ = run_command(
+                Commands.CONFIG, prefix, "--get", "channels", "--json"
+            )
             default_channels = json_loads(payload)["get"].get("channels", ["defaults"])
             run_command(Commands.CONFIG, prefix, "--add", "channels", channel_url)
             for channel in default_channels:
@@ -1927,19 +2405,22 @@ def test_anaconda_token_with_private_package(self):
 
             stdout, stderr, _ = run_command(Commands.CONFIG, prefix, "--show")
             yml_obj = yaml_round_trip_load(stdout)
-            assert yml_obj['channels'] == ["https://conda.anaconda.org/t//kalefranz"]
+            assert yml_obj["channels"] == [
+                "https://conda.anaconda.org/t//kalefranz"
+            ]
 
-            stdout, stderr, _ = run_command(Commands.SEARCH, prefix, "anyjson", "--platform",
-                                            "linux-64", "--json")
+            stdout, stderr, _ = run_command(
+                Commands.SEARCH, prefix, "anyjson", "--platform", "linux-64", "--json"
+            )
             json_obj = json_loads(stdout)
-            assert 'anyjson' in json_obj
+            assert "anyjson" in json_obj
 
         finally:
             rmtree(prefix, ignore_errors=True)
 
     def test_use_index_cache(self):
-        from conda.gateways.connection.session import CondaSession
         from conda.core.subdir_data import SubdirData
+        from conda.gateways.connection.session import CondaSession
 
         SubdirData.clear_cached_local_channel_data(exclude_file=False)
@@ -1952,7 +2433,8 @@ def test_use_index_cache(self):
         # Then, populate the index cache.
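# Illustrative aside, not part of the commit: the patch.object(..., autospec=True)
# pattern used below passes the instance as the first side_effect argument,
# letting the test either assert on the URL or fall through to the real
# implementation. A self-contained sketch with a toy Session class (not
# conda's CondaSession):
from unittest.mock import patch


class Session:
    def get(self, url):
        return f"fetched {url}"


orig_get = Session.get

with patch.object(Session, "get", autospec=True) as mock_get:

    def side_effect(self, url):
        # Fail loudly if the code under test tries to refetch repodata.
        if url.endswith("/repodata.json"):
            raise AssertionError("index cache was not hit")
        # Otherwise fall through to the original implementation.
        return orig_get(self, url)

    mock_get.side_effect = side_effect
    assert Session().get("https://example.invalid/pkg") == "fetched https://example.invalid/pkg"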
         orig_get = CondaSession.get
-        with patch.object(CondaSession, 'get', autospec=True) as mock_method:
+        with patch.object(CondaSession, "get", autospec=True) as mock_method:
+
             def side_effect(self, url, **kwargs):
                 # Make sure that we don't use the cache because of the
                 # corresponding HTTP header. This test is supposed to test
@@ -1972,15 +2454,20 @@ def side_effect(self, url, **kwargs):
 
         # Next run with --use-index-cache and make sure it actually hits the cache
         # and does not go out fetching index data remotely.
-        with patch.object(CondaSession, 'get', autospec=True) as mock_method:
+        with patch.object(CondaSession, "get", autospec=True) as mock_method:
+
             def side_effect(self, url, **kwargs):
-                if url.endswith('/repodata.json') or url.endswith('/repodata.json.bz2'):
-                    raise AssertionError('Index cache was not hit')
+                if url.endswith("/repodata.json") or url.endswith(
+                    "/repodata.json.bz2"
+                ):
+                    raise AssertionError("Index cache was not hit")
                 else:
                     return orig_get(self, url, **kwargs)
 
             mock_method.side_effect = side_effect
-            run_command(Commands.INSTALL, prefix, "flask", "--json", "--use-index-cache")
+            run_command(
+                Commands.INSTALL, prefix, "flask", "--json", "--use-index-cache"
+            )
 
     def test_offline_with_empty_index_cache(self):
         from conda.core.subdir_data import SubdirData
@@ -1989,9 +2476,13 @@ def test_offline_with_empty_index_cache(self):
 
         try:
             with make_temp_env(use_restricted_unicode=on_win) as prefix:
-                pkgs_dir = join(prefix, 'pkgs')
-                with env_var('CONDA_PKGS_DIRS', pkgs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol):
-                    with make_temp_channel(['flask-2.1.3']) as channel:
+                pkgs_dir = join(prefix, "pkgs")
+                with env_var(
+                    "CONDA_PKGS_DIRS",
+                    pkgs_dir,
+                    stack_callback=conda_tests_ctxt_mgmt_def_pol,
+                ):
+                    with make_temp_channel(["flask-2.1.3"]) as channel:
                         # Clear the index cache.
                         index_cache_dir = create_cache_dir()
                         run_command(Commands.CLEAN, "", "--index-cache", "--yes")
@@ -2007,30 +2498,51 @@ def test_offline_with_empty_index_cache(self):
 
                         orig_get = CondaSession.get
                         result_dict = {}
+
                         def side_effect(self, url, **kwargs):
                             if not url.startswith("file://"):
-                                raise AssertionError(f"Attempt to fetch repodata: {url}")
+                                raise AssertionError(
+                                    f"Attempt to fetch repodata: {url}"
+                                )
                             if url.startswith(channel):
-                                result_dict['local_channel_seen'] = True
+                                result_dict["local_channel_seen"] = True
                             return orig_get(self, url, **kwargs)
 
-                        with patch.object(CondaSession, 'get', autospec=True) as mock_method:
+                        with patch.object(
+                            CondaSession, "get", autospec=True
+                        ) as mock_method:
                             mock_method.side_effect = side_effect
-                            SubdirData.clear_cached_local_channel_data(exclude_file=False)
+                            SubdirData.clear_cached_local_channel_data(
+                                exclude_file=False
+                            )
 
                             # This first install passes because flask and its dependencies are in the
                             # package cache.
                             assert not package_is_installed(prefix, "flask")
-                            run_command(Commands.INSTALL, prefix, "-c", channel, "flask", "--offline")
+                            run_command(
+                                Commands.INSTALL,
+                                prefix,
+                                "-c",
+                                channel,
+                                "flask",
+                                "--offline",
+                            )
                             assert package_is_installed(prefix, "flask")
 
                             # The mock should have been called with our local channel URL though.
-                            assert result_dict.get('local_channel_seen')
+                            assert result_dict.get("local_channel_seen")
 
                             # Fails because pytz cannot be found in available channels.
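# Illustrative aside, not part of the commit: the pytest.raises pattern used
# below (and throughout these tests) makes a test fail unless the block raises
# the named exception. A minimal runnable sketch with a stdlib exception:
import pytest


def test_missing_key_raises():
    with pytest.raises(KeyError):
        {}["missing"]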
                             with pytest.raises(PackagesNotFoundError):
-                                run_command(Commands.INSTALL, prefix, "-c", channel, "pytz", "--offline")
+                                run_command(
+                                    Commands.INSTALL,
+                                    prefix,
+                                    "-c",
+                                    channel,
+                                    "pytz",
+                                    "--offline",
+                                )
                             assert not package_is_installed(prefix, "pytz")
         finally:
             SubdirData.clear_cached_local_channel_data(exclude_file=False)
@@ -2038,36 +2550,40 @@ def side_effect(self, url, **kwargs):
     def test_create_from_extracted(self):
         with make_temp_package_cache() as pkgs_dir:
             assert context.pkgs_dirs == (pkgs_dir,)
+
             def pkgs_dir_has_tarball(tarball_prefix):
-                return any(f.startswith(tarball_prefix) and any(f.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS)
-                           for f in os.listdir(pkgs_dir))
+                return any(
+                    f.startswith(tarball_prefix)
+                    and any(f.endswith(ext) for ext in CONDA_PACKAGE_EXTENSIONS)
+                    for f in os.listdir(pkgs_dir)
+                )
 
             with make_temp_env() as prefix:
                 # First, make sure the openssl package is present in the cache,
                 # downloading it if needed
-                assert not pkgs_dir_has_tarball('openssl-')
-                run_command(Commands.INSTALL, prefix, 'openssl')
-                assert pkgs_dir_has_tarball('openssl-')
+                assert not pkgs_dir_has_tarball("openssl-")
+                run_command(Commands.INSTALL, prefix, "openssl")
+                assert pkgs_dir_has_tarball("openssl-")
 
                 # Then, remove the tarball but keep the extracted directory around
-                run_command(Commands.CLEAN, prefix, '--tarballs', '--yes')
-                assert not pkgs_dir_has_tarball('openssl-')
+                run_command(Commands.CLEAN, prefix, "--tarballs", "--yes")
+                assert not pkgs_dir_has_tarball("openssl-")
 
             with make_temp_env() as prefix:
                 # Finally, install openssl, enforcing the use of the extracted package.
                 # We expect that the tarball does not appear again because we simply
                 # linked the package from the extracted directory. If the tarball
                 # appeared again, we decided to re-download the package for some reason.
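# Illustrative aside, not part of the commit: a self-contained sketch of the
# pkgs_dir_has_tarball() check defined earlier in this hunk -- scan a directory
# for files matching a package-name prefix and a known conda package
# extension. The extensions tuple is hardcoded here instead of importing
# CONDA_PACKAGE_EXTENSIONS from conda:
import os
import tempfile

CONDA_PACKAGE_EXTENSIONS = (".tar.bz2", ".conda")


def has_tarball(pkgs_dir, name_prefix):
    # str.endswith accepts a tuple, so one call covers both extensions.
    return any(
        f.startswith(name_prefix) and f.endswith(CONDA_PACKAGE_EXTENSIONS)
        for f in os.listdir(pkgs_dir)
    )


with tempfile.TemporaryDirectory() as d:
    open(os.path.join(d, "openssl-3.0.0-h0.conda"), "w").close()
    assert has_tarball(d, "openssl-")
    assert not has_tarball(d, "flask-")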
-                run_command(Commands.INSTALL, prefix, 'openssl', '--offline')
-                assert not pkgs_dir_has_tarball('openssl-')
+                run_command(Commands.INSTALL, prefix, "openssl", "--offline")
+                assert not pkgs_dir_has_tarball("openssl-")
 
     def test_install_mkdir(self):
         try:
             prefix = make_temp_prefix()
-            with open(os.path.join(prefix, 'tempfile.txt'), "w") as f:
-                f.write('test')
+            with open(os.path.join(prefix, "tempfile.txt"), "w") as f:
+                f.write("test")
             assert isdir(prefix)
-            assert isfile(os.path.join(prefix, 'tempfile.txt'))
+            assert isfile(os.path.join(prefix, "tempfile.txt"))
 
             with pytest.raises(DirectoryNotACondaEnvironmentError):
                 run_command(Commands.INSTALL, prefix, "python", "--mkdir")
@@ -2079,7 +2595,14 @@ def test_install_mkdir(self):
             assert path_is_clean(prefix)
 
             # this part also a regression test for #4849
-            run_command(Commands.INSTALL, prefix, "python-dateutil", "python", "--mkdir", no_capture=True)
+            run_command(
+                Commands.INSTALL,
+                prefix,
+                "python-dateutil",
+                "python",
+                "--mkdir",
+                no_capture=True,
+            )
             assert package_is_installed(prefix, "python")
             assert package_is_installed(prefix, "python-dateutil")
@@ -2089,20 +2612,24 @@ def test_install_mkdir(self):
     @pytest.mark.skipif(on_win, reason="python doesn't have dependencies on windows")
     def test_disallowed_packages(self):
         with make_temp_env() as prefix:
-            with env_var('CONDA_DISALLOWED_PACKAGES', 'sqlite&flask', stack_callback=conda_tests_ctxt_mgmt_def_pol):
+            with env_var(
+                "CONDA_DISALLOWED_PACKAGES",
+                "sqlite&flask",
+                stack_callback=conda_tests_ctxt_mgmt_def_pol,
+            ):
                 with pytest.raises(CondaMultiError) as exc:
-                    run_command(Commands.INSTALL, prefix, 'python')
+                    run_command(Commands.INSTALL, prefix, "python")
             exc_val = exc.value.errors[0]
             assert isinstance(exc_val, DisallowedPackageError)
-            assert exc_val.dump_map()['package_ref']['name'] == 'sqlite'
+            assert exc_val.dump_map()["package_ref"]["name"] == "sqlite"
 
     def test_dont_remove_conda_1(self):
         pkgs_dirs = context.pkgs_dirs
         prefix = make_temp_prefix()
-        with env_vars({
-            'CONDA_ROOT_PREFIX': prefix,
-            'CONDA_PKGS_DIRS': ','.join(pkgs_dirs)
-        }, stack_callback=conda_tests_ctxt_mgmt_def_pol):
+        with env_vars(
+            {"CONDA_ROOT_PREFIX": prefix, "CONDA_PKGS_DIRS": ",".join(pkgs_dirs)},
+            stack_callback=conda_tests_ctxt_mgmt_def_pol,
+        ):
             with make_temp_env(prefix=prefix):
                 _, _, _ = run_command(Commands.INSTALL, prefix, "conda", "conda-build")
                 assert package_is_installed(prefix, "conda")
@@ -2110,14 +2637,14 @@ def test_dont_remove_conda_1(self):
                 assert package_is_installed(prefix, "conda-build")
 
                 with pytest.raises(CondaMultiError) as exc:
-                    run_command(Commands.REMOVE, prefix, 'conda')
+                    run_command(Commands.REMOVE, prefix, "conda")
 
                 assert any(isinstance(e, RemoveError) for e in exc.value.errors)
                 assert package_is_installed(prefix, "conda")
                 assert package_is_installed(prefix, "pycosat")
 
                 with pytest.raises(CondaMultiError) as exc:
-                    run_command(Commands.REMOVE, prefix, 'pycosat')
+                    run_command(Commands.REMOVE, prefix, "pycosat")
 
                 assert any(isinstance(e, RemoveError) for e in exc.value.errors)
                 assert package_is_installed(prefix, "conda")
@@ -2129,23 +2656,23 @@ def test_dont_remove_conda_2(self):
         pkgs_dirs = context.pkgs_dirs
         prefix = make_temp_prefix()
         with make_temp_env(prefix=prefix):
-            with env_vars({
-                'CONDA_ROOT_PREFIX': prefix,
-                'CONDA_PKGS_DIRS': ','.join(pkgs_dirs)
-            }, stack_callback=conda_tests_ctxt_mgmt_def_pol):
+            with env_vars(
+                {"CONDA_ROOT_PREFIX": prefix, "CONDA_PKGS_DIRS": ",".join(pkgs_dirs)},
+                stack_callback=conda_tests_ctxt_mgmt_def_pol,
+            ):
                _, _, _ = run_command(Commands.INSTALL, prefix, "conda")
                assert package_is_installed(prefix, "conda")
                assert package_is_installed(prefix, "pycosat")
 
                with pytest.raises(CondaMultiError) as exc:
-                    run_command(Commands.REMOVE, prefix, 'pycosat')
+                    run_command(Commands.REMOVE, prefix, "pycosat")
 
                assert any(isinstance(e, RemoveError) for e in exc.value.errors)
                assert package_is_installed(prefix, "conda")
                assert package_is_installed(prefix, "pycosat")
 
                with pytest.raises(CondaMultiError) as exc:
-                    run_command(Commands.REMOVE, prefix, 'conda')
+                    run_command(Commands.REMOVE, prefix, "conda")
 
                assert any(isinstance(e, RemoveError) for e in exc.value.errors)
                assert package_is_installed(prefix, "conda")
@@ -2173,38 +2700,42 @@ def test_download_only_flag(self):
         from conda.core.link import UnlinkLinkTransaction
 
         with patch.object(UnlinkLinkTransaction, "execute") as mock_method:
-            with make_temp_env("openssl", "--download-only", use_exception_handler=True):
+            with make_temp_env(
+                "openssl", "--download-only", use_exception_handler=True
+            ):
                 assert mock_method.call_count == 0
             with make_temp_env("openssl", use_exception_handler=True):
                 assert mock_method.call_count == 1
 
     def test_transactional_rollback_simple(self):
         from conda.core.path_actions import CreatePrefixRecordAction
-        with patch.object(CreatePrefixRecordAction, 'execute') as mock_method:
+
+        with patch.object(CreatePrefixRecordAction, "execute") as mock_method:
             with make_temp_env() as prefix:
-                mock_method.side_effect = KeyError('Bang bang!!')
+                mock_method.side_effect = KeyError("Bang bang!!")
                 with pytest.raises(CondaMultiError):
-                    run_command(Commands.INSTALL, prefix, 'openssl')
-                assert not package_is_installed(prefix, 'openssl')
+                    run_command(Commands.INSTALL, prefix, "openssl")
+                assert not package_is_installed(prefix, "openssl")
 
     def test_transactional_rollback_upgrade_downgrade(self):
         with make_temp_env("python=3.8", no_capture=True) as prefix:
             assert exists(join(prefix, PYTHON_BINARY))
-            assert package_is_installed(prefix, 'python=3')
+            assert package_is_installed(prefix, "python=3")
 
-            run_command(Commands.INSTALL, prefix, 'flask=2.1.3')
-            assert package_is_installed(prefix, 'flask=2.1.3')
+            run_command(Commands.INSTALL, prefix, "flask=2.1.3")
+            assert package_is_installed(prefix, "flask=2.1.3")
 
             from conda.core.path_actions import CreatePrefixRecordAction
-            with patch.object(CreatePrefixRecordAction, 'execute') as mock_method:
-                mock_method.side_effect = KeyError('Bang bang!!')
+
+            with patch.object(CreatePrefixRecordAction, "execute") as mock_method:
+                mock_method.side_effect = KeyError("Bang bang!!")
                 with pytest.raises(CondaMultiError):
-                    run_command(Commands.INSTALL, prefix, 'flask=2.0.1')
-                assert package_is_installed(prefix, 'flask=2.1.3')
+                    run_command(Commands.INSTALL, prefix, "flask=2.0.1")
+                assert package_is_installed(prefix, "flask=2.1.3")
 
     def test_directory_not_a_conda_environment(self):
         prefix = make_temp_prefix(str(uuid4())[:7])
-        with open(join(prefix, 'tempfile.txt'), 'w') as f:
+        with open(join(prefix, "tempfile.txt"), "w") as f:
             f.write("weeee")
 
         try:
             with pytest.raises(DirectoryNotACondaEnvironmentError):
@@ -2230,22 +2761,38 @@ def test_multiline_run_command(self):
         assert env_which_etc
         assert not errs_etc
 
-    @pytest.mark.skip('Test is flaky')
+    @pytest.mark.skip("Test is flaky")
     def test_conda_downgrade(self):
         # Create an environment with the current conda under test, but include an earlier
         # version of conda and other packages in that environment.
         # Make sure we can flip back and forth.
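# Illustrative aside, not part of the commit: env_vars() comes from conda's
# test utilities; a rough stdlib-only equivalent (ignoring the stack_callback
# machinery used in these tests) behaves like this:
import os
from contextlib import contextmanager


@contextmanager
def env_vars(mapping):
    # Remember the prior values so they can be restored on exit.
    saved = {key: os.environ.get(key) for key in mapping}
    os.environ.update(mapping)
    try:
        yield
    finally:
        for key, old in saved.items():
            if old is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = old


with env_vars({"CONDA_AUTO_UPDATE_CONDA": "false"}):
    assert os.environ["CONDA_AUTO_UPDATE_CONDA"] == "false"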
-        with env_vars({
-            "CONDA_AUTO_UPDATE_CONDA": "false",
-            "CONDA_ALLOW_CONDA_DOWNGRADES": "true",
-            "CONDA_DLL_SEARCH_MODIFICATION_ENABLE": "1",
-        }, stack_callback=conda_tests_ctxt_mgmt_def_pol):
+        with env_vars(
+            {
+                "CONDA_AUTO_UPDATE_CONDA": "false",
+                "CONDA_ALLOW_CONDA_DOWNGRADES": "true",
+                "CONDA_DLL_SEARCH_MODIFICATION_ENABLE": "1",
+            },
+            stack_callback=conda_tests_ctxt_mgmt_def_pol,
+        ):
             py_ver = "3"
-            with make_temp_env("conda=4.6.14", "python=" + py_ver, "conda-package-handling", use_restricted_unicode=True,
-                               name = '_' + str(uuid4())[:8]) as prefix:  # rev 0
+            with make_temp_env(
+                "conda=4.6.14",
+                "python=" + py_ver,
+                "conda-package-handling",
+                use_restricted_unicode=True,
+                name="_" + str(uuid4())[:8],
+            ) as prefix:  # rev 0
                 # See comment in test_init_dev_and_NoBaseEnvironmentError.
-                python_exe = join(prefix, 'python.exe') if on_win else join(prefix, 'bin', 'python')
-                conda_exe = join(prefix, 'Scripts', 'conda.exe') if on_win else join(prefix, 'bin', 'conda')
+                python_exe = (
+                    join(prefix, "python.exe")
+                    if on_win
+                    else join(prefix, "bin", "python")
+                )
+                conda_exe = (
+                    join(prefix, "Scripts", "conda.exe")
+                    if on_win
+                    else join(prefix, "bin", "conda")
+                )
                 # this is used to run the python interpreter in the env and loads our dev
                 # version of conda
                 py_co = [python_exe, "-m", "conda"]
@@ -2256,12 +2803,16 @@ def test_conda_downgrade(self):
                 assert package_is_installed(prefix, "lockfile")
 
                 # runs the conda in the env to install something new into the env
-                subprocess_call_with_clean_env([conda_exe, "install", "-yp", prefix, "itsdangerous"], path=prefix)  #rev 2
+                subprocess_call_with_clean_env(
+                    [conda_exe, "install", "-yp", prefix, "itsdangerous"], path=prefix
+                )  # rev 2
                 PrefixData._cache_.clear()
                 assert package_is_installed(prefix, "itsdangerous")
 
                 # downgrade the version of conda in the env, using our dev version of conda
-                subprocess_call(py_co + ["install", "-yp", prefix, "conda<4.6.14"], path=prefix)  #rev 3
+                subprocess_call(
+                    py_co + ["install", "-yp", prefix, "conda<4.6.14"], path=prefix
+                )  # rev 3
                 PrefixData._cache_.clear()
                 assert not package_is_installed(prefix, "conda=4.6.14")
@@ -2276,32 +2827,42 @@ def test_conda_downgrade(self):
                 assert package_is_installed(prefix, "conda=4.6.14")
 
                 # use the conda in the env to revert to a previous state
-                subprocess_call_with_clean_env([conda_exe, "install", "-yp", prefix, "--rev", "1"], path=prefix)
+                subprocess_call_with_clean_env(
+                    [conda_exe, "install", "-yp", prefix, "--rev", "1"], path=prefix
+                )
                 PrefixData._cache_.clear()
                 assert not package_is_installed(prefix, "itsdangerous")
                 PrefixData._cache_.clear()
                 assert package_is_installed(prefix, "conda=4.6.14")
                 assert package_is_installed(prefix, "python=" + py_ver)
 
-                result = subprocess_call_with_clean_env([conda_exe, "info", "--json"], path=prefix)
+                result = subprocess_call_with_clean_env(
+                    [conda_exe, "info", "--json"], path=prefix
+                )
                 conda_info = json.loads(result.stdout)
                 assert conda_info["conda_version"] == "4.6.14"
 
     @pytest.mark.skipif(on_win, reason="openssl only has a postlink script on unix")
     def test_run_script_called(self):
         import conda.core.link
-        with patch.object(conda.core.link, 'subprocess_call') as rs:
+
+        with patch.object(conda.core.link, "subprocess_call") as rs:
             rs.return_value = Response(None, None, 0)
-            with make_temp_env("-c", "http://repo.anaconda.com/pkgs/free", "openssl=1.0.2j", "--no-deps") as prefix:
-                assert package_is_installed(prefix, 'openssl')
+            with make_temp_env(
+                "-c",
+                "http://repo.anaconda.com/pkgs/free",
+                "openssl=1.0.2j",
+                "--no-deps",
+            ) as prefix:
+                assert package_is_installed(prefix, "openssl")
                 assert rs.call_count == 1
 
     @pytest.mark.xfail(on_mac, reason="known broken; see #11127")
     def test_post_link_run_in_env(self):
-        test_pkg = '_conda_test_env_activated_when_post_link_executed'
+        test_pkg = "_conda_test_env_activated_when_post_link_executed"
         # a non-unicode name must be provided here as activate.d scripts
         # are not executed on windows, see https://github.com/conda/conda/issues/8241
-        with make_temp_env(test_pkg, '-c', 'conda-test') as prefix:
+        with make_temp_env(test_pkg, "-c", "conda-test") as prefix:
             assert package_is_installed(prefix, test_pkg)
 
     def test_conda_info_python(self):
@@ -2310,64 +2871,79 @@ def test_conda_info_python(self):
     def test_toolz_cytoolz_package_cache_regression(self):
         with make_temp_env("python=3.5", use_restricted_unicode=on_win) as prefix:
-            pkgs_dir = join(prefix, 'pkgs')
-            with env_var('CONDA_PKGS_DIRS', pkgs_dir, stack_callback=conda_tests_ctxt_mgmt_def_pol):
+            pkgs_dir = join(prefix, "pkgs")
+            with env_var(
+                "CONDA_PKGS_DIRS",
+                pkgs_dir,
+                stack_callback=conda_tests_ctxt_mgmt_def_pol,
+            ):
                 assert context.pkgs_dirs == (pkgs_dir,)
-                run_command(Commands.INSTALL, prefix, "-c", "conda-forge", "toolz", "cytoolz")
-                assert package_is_installed(prefix, 'toolz')
+                run_command(
+                    Commands.INSTALL, prefix, "-c", "conda-forge", "toolz", "cytoolz"
+                )
+                assert package_is_installed(prefix, "toolz")
 
     def test_remove_spellcheck(self):
         with make_temp_env("numpy=1.12") as prefix:
             assert exists(join(prefix, PYTHON_BINARY))
-            assert package_is_installed(prefix, 'numpy')
+            assert package_is_installed(prefix, "numpy")
 
             with pytest.raises(PackagesNotFoundError) as exc:
-                run_command(Commands.REMOVE, prefix, 'numpi')
+                run_command(Commands.REMOVE, prefix, "numpi")
 
-            exc_string = '%r' % exc.value
-            assert exc_string.strip() == dals("""
+            exc_string = "%r" % exc.value
+            assert (
+                exc_string.strip()
+                == dals(
+                    """
                 PackagesNotFoundError: The following packages are missing from the target environment:
                   - numpi
-                """).strip()
-            assert package_is_installed(prefix, 'numpy')
+                """
+                ).strip()
+            )
+            assert package_is_installed(prefix, "numpy")
 
     def test_conda_list_json(self):
         def pkg_info(s):
             # function from nb_conda/envmanager.py
             if isinstance(s, str):
-                name, version, build = s.rsplit('-', 2)
-                return {
-                    'name': name,
-                    'version': version,
-                    'build': build
-                }
+                name, version, build = s.rsplit("-", 2)
+                return {"name": name, "version": version, "build": build}
             else:
                 return {
-                    'name': s['name'],
-                    'version': s['version'],
-                    'build': s.get('build_string') or s['build']
+                    "name": s["name"],
+                    "version": s["version"],
+                    "build": s.get("build_string") or s["build"],
                 }
 
         with make_temp_env("python=3") as prefix:
-            stdout, stderr, _ = run_command(Commands.LIST, prefix, '--json')
+            stdout, stderr, _ = run_command(Commands.LIST, prefix, "--json")
             stdout_json = json.loads(stdout)
             packages = [pkg_info(package) for package in stdout_json]
-            python_package = next(p for p in packages if p['name'] == 'python')
-            assert python_package['version'].startswith('3')
+            python_package = next(p for p in packages if p["name"] == "python")
+            assert python_package["version"].startswith("3")
 
-    @pytest.mark.skipif(context.subdir == "win-32", reason="dependencies not available for win-32")
+    @pytest.mark.skipif(
+        context.subdir == "win-32", reason="dependencies not available for win-32"
+    )
     def test_legacy_repodata(self):
-        channel = join(dirname(abspath(__file__)), 'data', 'legacy_repodata')
+        channel = join(dirname(abspath(__file__)), "data", "legacy_repodata")
         subdir = context.subdir
         if subdir not in ("win-64", "linux-64", "osx-64"):
             # run test even though default subdir doesn't have dependencies
             subdir = "linux-64"
 
-        with env_var("CONDA_SUBDIR", subdir, stack_callback=conda_tests_ctxt_mgmt_def_pol):
-            with make_temp_env('python', 'moto=1.3.7', '-c', channel, '--no-deps') as prefix:
+        with env_var(
+            "CONDA_SUBDIR", subdir, stack_callback=conda_tests_ctxt_mgmt_def_pol
+        ):
+            with make_temp_env(
+                "python", "moto=1.3.7", "-c", channel, "--no-deps"
+            ) as prefix:
                 assert exists(join(prefix, PYTHON_BINARY))
-                assert package_is_installed(prefix, 'moto=1.3.7')
+                assert package_is_installed(prefix, "moto=1.3.7")
 
-    @pytest.mark.skipif(context.subdir == "win-32", reason="dependencies not available for win-32")
+    @pytest.mark.skipif(
+        context.subdir == "win-32", reason="dependencies not available for win-32"
    )
     def test_cross_channel_incompatibility(self):
         # regression test for https://github.com/conda/conda/issues/8772
         # conda-forge puts a run_constrains on libboost, which they don't have on conda-forge.
 
         # if this test passes, we'll hit the DryRunExit exception, instead of an UnsatisfiableError
         with pytest.raises(DryRunExit):
-            stdout, stderr, _ = run_command(Commands.CREATE, "dummy_channel_incompat_test",
-                                            '--dry-run', '-c', 'conda-forge', 'python',
-                                            'boost==1.70.0', 'boost-cpp==1.70.0', no_capture=True)
+            stdout, stderr, _ = run_command(
+                Commands.CREATE,
+                "dummy_channel_incompat_test",
+                "--dry-run",
+                "-c",
+                "conda-forge",
+                "python",
+                "boost==1.70.0",
+                "boost-cpp==1.70.0",
+                no_capture=True,
+            )
 
     # https://github.com/conda/conda/issues/9124
-    @pytest.mark.skipif(context.subdir != 'linux-64', reason="lazy; package constraint here only valid on linux-64")
+    @pytest.mark.skipif(
+        context.subdir != "linux-64",
+        reason="lazy; package constraint here only valid on linux-64",
+    )
     def test_neutering_of_historic_specs(self):
-        with make_temp_env('psutil=5.6.3=py37h7b6447c_0') as prefix:
+        with make_temp_env("psutil=5.6.3=py37h7b6447c_0") as prefix:
             stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "python=3.6")
-            with open(os.path.join(prefix, 'conda-meta', 'history')) as f:
+            with open(os.path.join(prefix, "conda-meta", "history")) as f:
                 d = f.read()
             assert re.search(r"neutered specs:.*'psutil==5.6.3'\]", d)
             # this would be unsatisfiable if the neutered specs were not being factored in correctly.
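# Illustrative aside, not part of the commit: the assertion above greps
# conda-meta/history with re.search. A runnable sketch against a fabricated
# history snippet (the timestamp and contents are made up for the example):
import re

history = "==> 2023-01-01 00:00:00 <==\n# neutered specs: ['psutil==5.6.3']\n"
# The pattern tolerates anything between the label and the quoted spec,
# and anchors on the closing bracket of the list.
assert re.search(r"neutered specs:.*'psutil==5.6.3'\]", history)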
@@ -2393,7 +2980,9 @@ def test_neutering_of_historic_specs(self):
             stdout, stderr, _ = run_command(Commands.INSTALL, prefix, "imagesize")
 
     # https://github.com/conda/conda/issues/10116
-    @pytest.mark.skipif(not context.subdir.startswith('linux'), reason="__glibc only available on linux")
+    @pytest.mark.skipif(
+        not context.subdir.startswith("linux"), reason="__glibc only available on linux"
+    )
     def test_install_bound_virtual_package(self):
         with make_temp_env("__glibc>0"):
             pass
@@ -2402,7 +2991,7 @@ def test_install_bound_virtual_package(self):
     def test_remove_empty_env(self):
         with make_temp_env() as prefix:
             run_command(Commands.CREATE, prefix)
-            run_command(Commands.REMOVE, prefix, '--all')
+            run_command(Commands.REMOVE, prefix, "--all")
 
     def test_remove_ignore_nonenv(self):
         with tempdir() as test_root:
@@ -2416,34 +3005,35 @@ def test_remove_ignore_nonenv(self):
             with pytest.raises(DirectoryNotACondaEnvironmentError):
                 run_command(Commands.REMOVE, prefix, "--all")
 
-            assert(exists(filename))
-            assert(exists(prefix))
+            assert exists(filename)
+            assert exists(prefix)
 
 
 @pytest.mark.skipif(True, reason="get the rest of Solve API worked out first")
 @pytest.mark.integration
 class PrivateEnvIntegrationTests(TestCase):
-
     def setUp(self):
         PackageCacheData.clear()
-        self.pkgs_dirs = ','.join(context.pkgs_dirs)
+        self.pkgs_dirs = ",".join(context.pkgs_dirs)
         self.prefix = create_temp_location()
         run_command(Commands.CREATE, self.prefix)
 
         self.preferred_env = "_spiffy-test-app_"
-        self.preferred_env_prefix = join(self.prefix, 'envs', self.preferred_env)
+        self.preferred_env_prefix = join(self.prefix, "envs", self.preferred_env)
 
         # self.save_path_conflict = os.environ.get('CONDA_PATH_CONFLICT')
         self.saved_values = {}
-        self.saved_values['CONDA_ROOT_PREFIX'] = os.environ.get('CONDA_ROOT_PREFIX')
-        self.saved_values['CONDA_PKGS_DIRS'] = os.environ.get('CONDA_PKGS_DIRS')
-        self.saved_values['CONDA_ENABLE_PRIVATE_ENVS'] = os.environ.get('CONDA_ENABLE_PRIVATE_ENVS')
+        self.saved_values["CONDA_ROOT_PREFIX"] = os.environ.get("CONDA_ROOT_PREFIX")
+        self.saved_values["CONDA_PKGS_DIRS"] = os.environ.get("CONDA_PKGS_DIRS")
+        self.saved_values["CONDA_ENABLE_PRIVATE_ENVS"] = os.environ.get(
+            "CONDA_ENABLE_PRIVATE_ENVS"
+        )
 
         # os.environ['CONDA_PATH_CONFLICT'] = 'prevent'
-        os.environ['CONDA_ROOT_PREFIX'] = self.prefix
-        os.environ['CONDA_PKGS_DIRS'] = self.pkgs_dirs
-        os.environ['CONDA_ENABLE_PRIVATE_ENVS'] = 'true'
+        os.environ["CONDA_ROOT_PREFIX"] = self.prefix
+        os.environ["CONDA_PKGS_DIRS"] = self.pkgs_dirs
+        os.environ["CONDA_ENABLE_PRIVATE_ENVS"] = "true"
 
         reset_context()
@@ -2460,44 +3050,54 @@ def tearDown(self):
 
     def exe_file(self, prefix, exe_name):
         if on_win:
-            exe_name = exe_name + '.exe'
+            exe_name = exe_name + ".exe"
         return join(prefix, get_bin_directory_short_path(), exe_name)
 
-    @patch.object(Context, 'prefix_specified')
+    @patch.object(Context, "prefix_specified")
     def test_simple_install_uninstall(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
         # >> simple progression install then uninstall <<
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app"
+        )
         assert not package_is_installed(self.prefix, "spiffy-test-app")
-        assert isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
+        assert isfile(self.exe_file(self.prefix, "spiffy-test-app"))
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
 
-        with env_var('YABBA-DABBA', 'doo'):
-            stdout, stderr, rc = subprocess_call_with_clean_env(self.exe_file(self.prefix, 'spiffy-test-app'))
+        with env_var("YABBA-DABBA", "doo"):
+            stdout, stderr, rc = subprocess_call_with_clean_env(
+                self.exe_file(self.prefix, "spiffy-test-app")
+            )
         assert not stderr
         assert rc == 0
         json_d = json.loads(stdout)
-        assert json_d['YABBA-DABBA'] == 'doo'
+        assert json_d["YABBA-DABBA"] == "doo"
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app"
+        )
         assert not package_is_installed(self.prefix, "uses-spiffy-test-app")
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
 
         run_command(Commands.REMOVE, self.prefix, "uses-spiffy-test-app")
-        assert not package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
+        assert not package_is_installed(
+            self.preferred_env_prefix, "uses-spiffy-test-app"
+        )
 
         run_command(Commands.REMOVE, self.prefix, "spiffy-test-app")
         assert not package_is_installed(self.prefix, "spiffy-test-app")
-        assert not isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
+        assert not isfile(self.exe_file(self.prefix, "spiffy-test-app"))
         assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
-        assert not isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app'))
+        assert not isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app"))
 
-    @patch.object(Context, 'prefix_specified')
+    @patch.object(Context, "prefix_specified")
     def test_install_dep_uninstall_base(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
         # >> install uses-spiffy-test-app, uninstall spiffy-test-app <<
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app"
+        )
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
         assert not package_is_installed(self.prefix, "spiffy-test-app")
@@ -2506,24 +3106,28 @@ def test_install_dep_uninstall_base(self, prefix_specified):
         with pytest.raises(PackagesNotFoundError):
             run_command(Commands.REMOVE, self.prefix, "spiffy-test-app")
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
-        assert isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app'))
+        assert isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app"))
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
         assert not package_is_installed(self.prefix, "spiffy-test-app")
-        assert not isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
+        assert not isfile(self.exe_file(self.prefix, "spiffy-test-app"))
 
         run_command(Commands.REMOVE, self.prefix, "uses-spiffy-test-app")
-        assert not package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
+        assert not package_is_installed(
+            self.preferred_env_prefix, "uses-spiffy-test-app"
+        )
 
         # this part tests that the private environment was fully pruned
         assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
-        assert not isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app'))
+        assert not isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app"))
 
-    @patch.object(Context, 'prefix_specified')
+    @patch.object(Context, "prefix_specified")
     def test_install_base_1_then_update(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
         # >>
install spiffy-test-app 1.0, then update << - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1" + ) assert package_is_installed(self.prefix, "spiffy-test-app") run_command(Commands.UPDATE, self.prefix, "-c", "conda-test", "spiffy-test-app") @@ -2533,162 +3137,216 @@ def test_install_base_1_then_update(self, prefix_specified): run_command(Commands.REMOVE, self.prefix, "spiffy-test-app") assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app") - @patch.object(Context, 'prefix_specified') + @patch.object(Context, "prefix_specified") def test_install_base_then_remove_from_private_env(self, prefix_specified): prefix_specified.__get__ = Mock(return_value=False) # >> install spiffy-test-app, then remove from preferred env << - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app" + ) assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app") run_command(Commands.REMOVE, self.preferred_env_prefix, "spiffy-test-app") assert not package_is_installed(self.prefix, "spiffy-test-app") - assert not isfile(self.exe_file(self.prefix, 'spiffy-test-app')) + assert not isfile(self.exe_file(self.prefix, "spiffy-test-app")) assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app") - assert not isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app')) + assert not isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app")) - @patch.object(Context, 'prefix_specified') + @patch.object(Context, "prefix_specified") def test_install_base_1_then_install_base_2(self, prefix_specified): prefix_specified.__get__ = Mock(return_value=False) # >> install spiffy-test-app 1.0, then install spiffy-test-app 2.0 << - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1" + ) assert package_is_installed(self.prefix, "spiffy-test-app") - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=2") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=2" + ) assert not package_is_installed(self.prefix, "spiffy-test-app") assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app") run_command(Commands.REMOVE, self.prefix, "spiffy-test-app") assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app") - @patch.object(Context, 'prefix_specified') + @patch.object(Context, "prefix_specified") def test_install_base_2_then_install_base_1(self, prefix_specified): prefix_specified.__get__ = Mock(return_value=False) # >> install spiffy-test-app 2.0, then spiffy-test-app 1.0 << - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app" + ) assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app") - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1" + ) assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app") assert package_is_installed(self.prefix, "spiffy-test-app") - @patch.object(Context, 'prefix_specified') + @patch.object(Context, "prefix_specified") def 
test_install_base_2_then_install_dep_1(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
         # install spiffy-test-app 2.0, then uses-spiffy-test-app 1.0,
         # which should suck spiffy-test-app back to the root prefix
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app"
+        )
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
         assert not package_is_installed(self.prefix, "spiffy-test-app")
         assert not package_is_installed(self.prefix, "uses-spiffy-test-app")
-        assert not package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
+        assert not package_is_installed(
+            self.preferred_env_prefix, "uses-spiffy-test-app"
+        )
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app=1")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app=1"
+        )
         assert package_is_installed(self.prefix, "spiffy-test-app-2")
         assert package_is_installed(self.prefix, "uses-spiffy-test-app")
         assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
-        assert not package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
+        assert not package_is_installed(
+            self.preferred_env_prefix, "uses-spiffy-test-app"
+        )
 
-    @patch.object(Context, 'prefix_specified')
+    @patch.object(Context, "prefix_specified")
     def test_install_dep_2_then_install_base_1(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
         # install uses-spiffy-test-app 2.0, then spiffy-test-app 1.0,
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app"
+        )
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app")
-        assert not isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
+        assert not isfile(self.exe_file(self.prefix, "spiffy-test-app"))
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1"
+        )
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2")
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app=2")
         assert package_is_installed(self.prefix, "spiffy-test-app=1")
-        assert isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
+        assert isfile(self.exe_file(self.prefix, "spiffy-test-app"))
 
-    @patch.object(Context, 'prefix_specified')
+    @patch.object(Context, "prefix_specified")
     def test_install_base_1_dep_2_together(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=1", "uses-spiffy-test-app")
+        run_command(
+            Commands.INSTALL,
+            self.prefix,
+            "-c",
+            "conda-test",
+            "spiffy-test-app=1",
+            "uses-spiffy-test-app",
+        )
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2")
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app=2")
         assert package_is_installed(self.prefix, "spiffy-test-app-1")
 
-    @patch.object(Context, 'prefix_specified')
+    @patch.object(Context, "prefix_specified")
     def test_a2(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "uses-spiffy-test-app"
+        )
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2")
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app=2")
-        assert not isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
-        assert isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app'))
+        assert not isfile(self.exe_file(self.prefix, "spiffy-test-app"))
+        assert isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app"))
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "needs-spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "needs-spiffy-test-app"
+        )
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2")
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app=2")
         assert package_is_installed(self.prefix, "needs-spiffy-test-app")
         assert not package_is_installed(self.prefix, "uses-spiffy-test-app=2")
-        assert isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
-        assert isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app'))
+        assert isfile(self.exe_file(self.prefix, "spiffy-test-app"))
+        assert isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app"))
 
         run_command(Commands.REMOVE, self.prefix, "uses-spiffy-test-app")
         assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2")
-        assert not package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app=2")
+        assert not package_is_installed(
+            self.preferred_env_prefix, "uses-spiffy-test-app=2"
+        )
         assert package_is_installed(self.prefix, "needs-spiffy-test-app")
         assert not package_is_installed(self.prefix, "uses-spiffy-test-app=2")
-        assert isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
-        assert not isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app'))
+        assert isfile(self.exe_file(self.prefix, "spiffy-test-app"))
+        assert not isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app"))
 
         run_command(Commands.REMOVE, self.prefix, "needs-spiffy-test-app")
         assert not package_is_installed(self.prefix, "needs-spiffy-test-app")
         assert package_is_installed(self.prefix, "spiffy-test-app-2")
-        assert isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
+        assert isfile(self.exe_file(self.prefix, "spiffy-test-app"))
 
-    @patch.object(Context, 'prefix_specified')
+    @patch.object(Context, "prefix_specified")
     def test_b2(self, prefix_specified):
         prefix_specified.__get__ = Mock(return_value=False)
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app", "uses-spiffy-test-app")
+        run_command(
+            Commands.INSTALL,
+            self.prefix,
+            "-c",
+            "conda-test",
+            "spiffy-test-app",
+            "uses-spiffy-test-app",
+        )
         assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2")
         assert package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app")
-        assert isfile(self.exe_file(self.prefix, 'spiffy-test-app'))
+        assert isfile(self.exe_file(self.prefix, "spiffy-test-app"))
 
-        run_command(Commands.INSTALL, self.prefix, "-c", "conda-tes", "needs-spiffy-test-app")
+        run_command(
+            Commands.INSTALL, self.prefix, "-c", "conda-test", "needs-spiffy-test-app"
+        )
         assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2")
-        assert not package_is_installed(self.preferred_env_prefix, "uses-spiffy-test-app=2")
+        assert not package_is_installed(
+            self.preferred_env_prefix, "uses-spiffy-test-app=2"
+        )
         assert 
package_is_installed(self.prefix, "needs-spiffy-test-app") assert package_is_installed(self.prefix, "spiffy-test-app=2") assert package_is_installed(self.prefix, "uses-spiffy-test-app") - @patch.object(Context, 'prefix_specified') + @patch.object(Context, "prefix_specified") def test_c2(self, prefix_specified): prefix_specified.__get__ = Mock(return_value=False) - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "needs-spiffy-test-app") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "needs-spiffy-test-app" + ) assert package_is_installed(self.prefix, "spiffy-test-app=2") assert package_is_installed(self.prefix, "needs-spiffy-test-app") assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2") - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=2") # nothing to do + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app=2" + ) # nothing to do assert package_is_installed(self.prefix, "spiffy-test-app=2") assert package_is_installed(self.prefix, "needs-spiffy-test-app") assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2") - @patch.object(Context, 'prefix_specified') + @patch.object(Context, "prefix_specified") def test_d2(self, prefix_specified): prefix_specified.__get__ = Mock(return_value=False) - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "spiffy-test-app" + ) assert package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2") - assert isfile(self.exe_file(self.prefix, 'spiffy-test-app')) - assert isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app')) + assert isfile(self.exe_file(self.prefix, "spiffy-test-app")) + assert isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app")) - run_command(Commands.INSTALL, self.prefix, "-c", "conda-test", "needs-spiffy-test-app") + run_command( + Commands.INSTALL, self.prefix, "-c", "conda-test", "needs-spiffy-test-app" + ) assert not package_is_installed(self.preferred_env_prefix, "spiffy-test-app=2") assert package_is_installed(self.prefix, "spiffy-test-app=2") assert package_is_installed(self.prefix, "needs-spiffy-test-app") - assert not isfile(self.exe_file(self.preferred_env_prefix, 'spiffy-test-app')) + assert not isfile(self.exe_file(self.preferred_env_prefix, "spiffy-test-app")) diff --git a/tests/test_deprecations.py b/tests/test_deprecations.py index 8abdc01f2f9..ba32cd58675 100644 --- a/tests/test_deprecations.py +++ b/tests/test_deprecations.py @@ -4,7 +4,7 @@ import pytest -from conda.deprecations import DeprecationHandler, DeprecatedError +from conda.deprecations import DeprecatedError, DeprecationHandler @pytest.fixture(scope="module") diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py index cbd787df00b..581fceb7d2b 100644 --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -1,36 +1,34 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - +import getpass import json +import sys from unittest import TestCase from unittest.mock import patch -import sys -import getpass +from pytest import raises from conda.auxlib.collection import AttrDict from conda.auxlib.ish import dals -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context from conda.common.io import captured, env_var, env_vars from conda.exceptions import ( 
BasicClobberError, BinaryPrefixReplacementError, + ChecksumMismatchError, CommandNotFoundError, CondaHTTPError, CondaKeyError, DirectoryNotFoundError, + ExceptionHandler, KnownPackageClobberError, - ChecksumMismatchError, PackagesNotFoundError, PathNotFoundError, SharedLinkPathClobberError, TooManyArgumentsError, UnknownPackageClobberError, conda_exception_handler, - ExceptionHandler, ) -from pytest import raises def _raise_helper(exception): @@ -46,7 +44,6 @@ def username_not_in_post_mock(post_mock, username): class ExceptionTests(TestCase): - def test_TooManyArgumentsError(self): expected = 2 received = 5 @@ -58,86 +55,134 @@ def test_TooManyArgumentsError(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['exception_name'] == 'TooManyArgumentsError' - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) - assert json_obj['expected'] == 2 - assert json_obj['received'] == 5 - assert json_obj['offending_arguments'] == "groot" + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["exception_name"] == "TooManyArgumentsError" + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) + assert json_obj["expected"] == 2 + assert json_obj["received"] == 5 + assert json_obj["offending_arguments"] == "groot" with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == "TooManyArgumentsError: Got 5 arguments (g, r, o, o, t) but expected 2." + assert ( + c.stderr.strip() + == "TooManyArgumentsError: Got 5 arguments (g, r, o, o, t) but expected 2." + ) def test_BasicClobberError(self): source_path = "some/path/on/goodwin.ave" target_path = "some/path/to/wright.st" exc = BasicClobberError(source_path, target_path, context) repr(exc) - with env_var("CONDA_PATH_CONFLICT", "prevent", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_PATH_CONFLICT", + "prevent", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == dals(""" + assert ( + c.stderr.strip() + == dals( + """ ClobberError: Conda was asked to clobber an existing path. source path: some/path/on/goodwin.ave target path: some/path/to/wright.st - """).strip() + """ + ).strip() + ) def test_KnownPackageClobberError(self): target_path = "some/where/on/goodwin.ave" colliding_dist_being_linked = "Groot" colliding_linked_dist = "Liquid" - exc = KnownPackageClobberError(target_path, colliding_dist_being_linked, colliding_linked_dist, context) - with env_var("CONDA_PATH_CONFLICT", "prevent", stack_callback=conda_tests_ctxt_mgmt_def_pol): + exc = KnownPackageClobberError( + target_path, colliding_dist_being_linked, colliding_linked_dist, context + ) + with env_var( + "CONDA_PATH_CONFLICT", + "prevent", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == dals(""" + assert ( + c.stderr.strip() + == dals( + """ ClobberError: The package 'Groot' cannot be installed due to a path collision for 'some/where/on/goodwin.ave'. This path already exists in the target prefix, and it won't be removed by an uninstall action in this transaction. The path appears to be coming from the package 'Liquid', which is already installed in the prefix. 
- """).strip() + """ + ).strip() + ) def test_UnknownPackageClobberError(self): target_path = "siebel/center/for/c.s" colliding_dist_being_linked = "Groot" - exc = UnknownPackageClobberError(target_path, colliding_dist_being_linked, context) - with env_var("CONDA_PATH_CONFLICT", "prevent", stack_callback=conda_tests_ctxt_mgmt_def_pol): + exc = UnknownPackageClobberError( + target_path, colliding_dist_being_linked, context + ) + with env_var( + "CONDA_PATH_CONFLICT", + "prevent", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == dals(""" + assert ( + c.stderr.strip() + == dals( + """ ClobberError: The package 'Groot' cannot be installed due to a path collision for 'siebel/center/for/c.s'. This path already exists in the target prefix, and it won't be removed by an uninstall action in this transaction. The path is one that conda doesn't recognize. It may have been created by another package manager. - """).strip() + """ + ).strip() + ) def test_SharedLinkPathClobberError(self): target_path = "some/where/in/shampoo/banana" incompatible_package_dists = "Groot" - exc = SharedLinkPathClobberError(target_path, incompatible_package_dists, context) - with env_var("CONDA_PATH_CONFLICT", "prevent", stack_callback=conda_tests_ctxt_mgmt_def_pol): + exc = SharedLinkPathClobberError( + target_path, incompatible_package_dists, context + ) + with env_var( + "CONDA_PATH_CONFLICT", + "prevent", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == dals(""" + assert ( + c.stderr.strip() + == dals( + """ ClobberError: This transaction has incompatible packages due to a shared path. 
packages: G, r, o, o, t path: 'some/where/in/shampoo/banana' - """).strip() + """ + ).strip() + ) def test_CondaFileNotFoundError(self): filename = "Groot" @@ -148,10 +193,12 @@ def test_CondaFileNotFoundError(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['exception_name'] == 'PathNotFoundError' - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) + assert ( + json_obj["exception_type"] == "" + ) + assert json_obj["exception_name"] == "PathNotFoundError" + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: @@ -169,11 +216,14 @@ def test_DirectoryNotFoundError(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['exception_name'] == 'DirectoryNotFoundError' - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) - assert json_obj['path'] == "Groot" + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["exception_name"] == "DirectoryNotFoundError" + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) + assert json_obj["path"] == "Groot" with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: @@ -187,34 +237,44 @@ def test_MD5MismatchError(self): target_full_path = "/some/path/on/disk/another-name.tar.bz2" expected_md5sum = "abc123" actual_md5sum = "deadbeef" - exc = ChecksumMismatchError(url, target_full_path, "md5", expected_md5sum, actual_md5sum) + exc = ChecksumMismatchError( + url, target_full_path, "md5", expected_md5sum, actual_md5sum + ) with env_var("CONDA_JSON", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['exception_name'] == 'ChecksumMismatchError' - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) - assert json_obj['url'] == url - assert json_obj['target_full_path'] == target_full_path - assert json_obj['expected_checksum'] == expected_md5sum - assert json_obj['actual_checksum'] == actual_md5sum + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["exception_name"] == "ChecksumMismatchError" + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) + assert json_obj["url"] == url + assert json_obj["target_full_path"] == target_full_path + assert json_obj["expected_checksum"] == expected_md5sum + assert json_obj["actual_checksum"] == actual_md5sum with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == dals(""" + assert ( + c.stderr.strip() + == dals( + """ ChecksumMismatchError: Conda detected a mismatch between the expected content and downloaded content for url 'https://download.url/path/to/file.tar.bz2'. 
download saved to: /some/path/on/disk/another-name.tar.bz2 expected md5: abc123 actual md5: deadbeef - """).strip() + """ + ).strip() + ) def test_PackageNotFoundError(self): package = "Potato" @@ -225,19 +285,27 @@ def test_PackageNotFoundError(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == dals(""" + assert ( + c.stderr.strip() + == dals( + """ PackagesNotFoundError: The following packages are missing from the target environment: - Potato - """).strip() + """ + ).strip() + ) def test_CondaKeyError(self): key = "Potato" @@ -249,11 +317,11 @@ def test_CondaKeyError(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['exception_name'] == 'CondaKeyError' - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) - assert json_obj['key'] == "Potato" + assert json_obj["exception_type"] == "" + assert json_obj["exception_name"] == "CondaKeyError" + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) + assert json_obj["key"] == "Potato" with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: @@ -276,26 +344,34 @@ def test_CondaHTTPError(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['exception_name'] == 'CondaHTTPError' - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) - assert json_obj['url'] == url - assert json_obj['status_code'] == status_code - assert json_obj['reason'] == reason - assert json_obj['elapsed_time'] == elapsed_time + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["exception_name"] == "CondaHTTPError" + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) + assert json_obj["url"] == url + assert json_obj["status_code"] == status_code + assert json_obj["reason"] == reason + assert json_obj["elapsed_time"] == elapsed_time with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert dals(""" + assert ( + dals( + """ CondaHTTPError: HTTP Potato COULD NOT CONNECT for url Elapsed: 1.24 Potato - """).strip() in c.stderr.strip() + """ + ).strip() + in c.stderr.strip() + ) def test_CommandNotFoundError_simple(self): cmd = "instate" @@ -307,17 +383,22 @@ def test_CommandNotFoundError_simple(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == ("CommandNotFoundError: No command 'conda instate'.\n" - "Did you mean 'conda install'?") + assert c.stderr.strip() == ( + 
"CommandNotFoundError: No command 'conda instate'.\n" + "Did you mean 'conda install'?" + ) def test_CommandNotFoundError_conda_build(self): cmd = "build" @@ -329,60 +410,94 @@ def test_CommandNotFoundError_conda_build(self): json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == ("CommandNotFoundError: To use 'conda build', install conda-build.") - - @patch('requests.post', side_effect=( - AttrDict(headers=AttrDict(Location='somewhere.else'), status_code=302, - raise_for_status=lambda: None), + assert c.stderr.strip() == ( + "CommandNotFoundError: To use 'conda build', install conda-build." + ) + + @patch( + "requests.post", + side_effect=( + AttrDict( + headers=AttrDict(Location="somewhere.else"), + status_code=302, + raise_for_status=lambda: None, + ), AttrDict(raise_for_status=lambda: None), - )) + ), + ) def test_print_unexpected_error_message_upload_1(self, post_mock): - with env_var('CONDA_REPORT_ERRORS', 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_REPORT_ERRORS", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with captured() as c: ExceptionHandler()(_raise_helper, AssertionError()) username = getpass.getuser() assert username_not_in_post_mock(post_mock, username) assert post_mock.call_count == 2 - assert c.stdout == '' + assert c.stdout == "" assert "conda version" in c.stderr - @patch('requests.post', side_effect=( - AttrDict(headers=AttrDict(Location='somewhere.else'), status_code=302, - raise_for_status=lambda: None), - AttrDict(headers=AttrDict(Location='somewhere.again'), status_code=301, - raise_for_status=lambda: None), + @patch( + "requests.post", + side_effect=( + AttrDict( + headers=AttrDict(Location="somewhere.else"), + status_code=302, + raise_for_status=lambda: None, + ), + AttrDict( + headers=AttrDict(Location="somewhere.again"), + status_code=301, + raise_for_status=lambda: None, + ), AttrDict(raise_for_status=lambda: None), - )) + ), + ) def test_print_unexpected_error_message_upload_2(self, post_mock): - with env_var('CONDA_JSON', 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_YES', 'yes', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_JSON", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + with env_var( + "CONDA_YES", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with captured() as c: ExceptionHandler()(_raise_helper, AssertionError()) username = getpass.getuser() assert username_not_in_post_mock(post_mock, username) assert post_mock.call_count == 3 - assert len(json.loads(c.stdout)['conda_info']['channels']) >= 2 + assert len(json.loads(c.stdout)["conda_info"]["channels"]) >= 2 assert not c.stderr - @patch('requests.post', side_effect=( - AttrDict(headers=AttrDict(Location='somewhere.else'), status_code=302, - raise_for_status=lambda: None), + @patch( + "requests.post", + side_effect=( + AttrDict( + headers=AttrDict(Location="somewhere.else"), + status_code=302, + raise_for_status=lambda: None, + ), AttrDict(raise_for_status=lambda: None), - )) - @patch('conda.exceptions.input', return_value='y') - 
@patch('conda.exceptions.os.isatty', return_value=True) - def test_print_unexpected_error_message_upload_3(self, isatty_mock, input_mock, post_mock): + ), + ) + @patch("conda.exceptions.input", return_value="y") + @patch("conda.exceptions.os.isatty", return_value=True) + def test_print_unexpected_error_message_upload_3( + self, isatty_mock, input_mock, post_mock + ): with captured() as c: ExceptionHandler()(_raise_helper, AssertionError()) @@ -390,17 +505,27 @@ def test_print_unexpected_error_message_upload_3(self, isatty_mock, input_mock, assert username_not_in_post_mock(post_mock, username) assert input_mock.call_count == 1 assert post_mock.call_count == 2 - assert c.stdout == '' + assert c.stdout == "" assert "conda version" in c.stderr - @patch('requests.post', side_effect=( - AttrDict(headers=AttrDict(Location='somewhere.else'), status_code=302, - raise_for_status=lambda: None), + @patch( + "requests.post", + side_effect=( + AttrDict( + headers=AttrDict(Location="somewhere.else"), + status_code=302, + raise_for_status=lambda: None, + ), AttrDict(raise_for_status=lambda: None), - )) - @patch('getpass.getuser', return_value='some name') - def test_print_unexpected_error_message_upload_username_with_spaces(self, pwuid, post_mock): - with env_var('CONDA_REPORT_ERRORS', 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): + ), + ) + @patch("getpass.getuser", return_value="some name") + def test_print_unexpected_error_message_upload_username_with_spaces( + self, pwuid, post_mock + ): + with env_var( + "CONDA_REPORT_ERRORS", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with captured() as c: ExceptionHandler()(_raise_helper, AssertionError()) @@ -408,17 +533,27 @@ def test_print_unexpected_error_message_upload_username_with_spaces(self, pwuid, assert error_data.get("has_spaces") is True assert error_data.get("is_ascii") is True assert post_mock.call_count == 2 - assert c.stdout == '' + assert c.stdout == "" assert "conda version" in c.stderr - @patch('requests.post', side_effect=( - AttrDict(headers=AttrDict(Location='somewhere.else'), status_code=302, - raise_for_status=lambda: None), + @patch( + "requests.post", + side_effect=( + AttrDict( + headers=AttrDict(Location="somewhere.else"), + status_code=302, + raise_for_status=lambda: None, + ), AttrDict(raise_for_status=lambda: None), - )) - @patch('getpass.getuser', return_value='my√nameΩ') - def test_print_unexpected_error_message_upload_username_with_unicode(self, pwuid, post_mock): - with env_var('CONDA_REPORT_ERRORS', 'true', stack_callback=conda_tests_ctxt_mgmt_def_pol): + ), + ) + @patch("getpass.getuser", return_value="my√nameΩ") + def test_print_unexpected_error_message_upload_username_with_unicode( + self, pwuid, post_mock + ): + with env_var( + "CONDA_REPORT_ERRORS", "true", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): with captured() as c: ExceptionHandler()(_raise_helper, AssertionError()) @@ -426,33 +561,37 @@ def test_print_unexpected_error_message_upload_username_with_unicode(self, pwuid assert error_data.get("has_spaces") is False assert error_data.get("is_ascii") is False assert post_mock.call_count == 2 - assert c.stdout == '' + assert c.stdout == "" assert "conda version" in c.stderr - @patch('requests.post', return_value=None) - @patch('conda.exceptions.input', return_value='n') + @patch("requests.post", return_value=None) + @patch("conda.exceptions.input", return_value="n") def test_print_unexpected_error_message_opt_out_1(self, input_mock, post_mock): - with env_var('CONDA_REPORT_ERRORS', 'false', 
stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_REPORT_ERRORS", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): AssertionError() with captured() as c: ExceptionHandler()(_raise_helper, AssertionError()) assert input_mock.call_count == 0 assert post_mock.call_count == 0 - assert c.stdout == '' + assert c.stdout == "" print(c.stderr, file=sys.stderr) assert "conda version" in c.stderr - @patch('requests.post', return_value=None) - @patch('conda.exceptions.input', return_value='n') - @patch('conda.exceptions.os.isatty', return_value=True) - def test_print_unexpected_error_message_opt_out_2(self, isatty_mock, input_mock, post_mock): + @patch("requests.post", return_value=None) + @patch("conda.exceptions.input", return_value="n") + @patch("conda.exceptions.os.isatty", return_value=True) + def test_print_unexpected_error_message_opt_out_2( + self, isatty_mock, input_mock, post_mock + ): with captured() as c: ExceptionHandler()(_raise_helper, AssertionError()) assert input_mock.call_count == 1 assert post_mock.call_count == 0 - assert c.stdout == '' + assert c.stdout == "" assert "conda version" in c.stderr def test_BinaryPrefixReplacementError(self): @@ -461,37 +600,46 @@ def test_BinaryPrefixReplacementError(self): new_prefix = "some/where/on/goodwin.ave" path = "some/where/by/boneyard/creek" placeholder = "save/my/spot/in/374" - exc = BinaryPrefixReplacementError(path, placeholder, new_prefix, - original_data_length, new_data_length) + exc = BinaryPrefixReplacementError( + path, placeholder, new_prefix, original_data_length, new_data_length + ) with env_var("CONDA_JSON", "yes", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) json_obj = json.loads(c.stdout) assert not c.stderr - assert json_obj['exception_type'] == "" - assert json_obj['exception_name'] == 'BinaryPrefixReplacementError' - assert json_obj['message'] == str(exc) - assert json_obj['error'] == repr(exc) - assert json_obj['new_data_length'] == 1104 - assert json_obj['original_data_length'] == 1404 - assert json_obj['new_prefix'] == new_prefix - assert json_obj['path'] == path - assert json_obj['placeholder'] == placeholder + assert ( + json_obj["exception_type"] + == "" + ) + assert json_obj["exception_name"] == "BinaryPrefixReplacementError" + assert json_obj["message"] == str(exc) + assert json_obj["error"] == repr(exc) + assert json_obj["new_data_length"] == 1104 + assert json_obj["original_data_length"] == 1404 + assert json_obj["new_prefix"] == new_prefix + assert json_obj["path"] == path + assert json_obj["placeholder"] == placeholder with env_var("CONDA_JSON", "no", stack_callback=conda_tests_ctxt_mgmt_def_pol): with captured() as c: conda_exception_handler(_raise_helper, exc) assert not c.stdout - assert c.stderr.strip() == dals(""" + assert ( + c.stderr.strip() + == dals( + """ BinaryPrefixReplacementError: Refusing to replace mismatched data length in binary file. 
path: some/where/by/boneyard/creek
           placeholder: save/my/spot/in/374
           new prefix: some/where/on/goodwin.ave
           original data Length: 1404
           new data length: 1104
-        """).strip()
+        """
+            ).strip()
+        )
 
     def test_PackagesNotFoundError_use_only_tar_bz2(self):
         note = "use_only_tar_bz2"
diff --git a/tests/test_export.py b/tests/test_export.py
index 3c59edf02e4..e3c53126000 100644
--- a/tests/test_export.py
+++ b/tests/test_export.py
@@ -1,33 +1,39 @@
 # Copyright (C) 2012 Anaconda, Inc
 # SPDX-License-Identifier: BSD-3-Clause
-
 from os.path import exists, join
-from conda.auxlib.compat import Utf8NamedTemporaryFile
 from unittest import TestCase
-from conda.gateways.disk.delete import rm_rf
 import pytest
-from conda.testing.integration import Commands, PYTHON_BINARY, make_temp_env, make_temp_prefix, \
-    package_is_installed, run_command
+from conda.auxlib.compat import Utf8NamedTemporaryFile
+from conda.gateways.disk.delete import rm_rf
+from conda.testing.integration import (
+    PYTHON_BINARY,
+    Commands,
+    make_temp_env,
+    make_temp_prefix,
+    package_is_installed,
+    run_command,
+)
 
 
 @pytest.mark.integration
 class ExportIntegrationTests(TestCase):
-
     def test_basic(self):
         with make_temp_env("python=3.5") as prefix:
             assert exists(join(prefix, PYTHON_BINARY))
-            assert package_is_installed(prefix, 'python=3')
+            assert package_is_installed(prefix, "python=3")
             output, error, _ = run_command(Commands.LIST, prefix, "-e")
 
-            with Utf8NamedTemporaryFile(mode="w", suffix="txt", delete=False) as env_txt:
+            with Utf8NamedTemporaryFile(
+                mode="w", suffix="txt", delete=False
+            ) as env_txt:
                 env_txt.write(output)
                 env_txt.flush()
                 env_txt.close()
                 prefix2 = make_temp_prefix()
-                run_command(Commands.CREATE, prefix2 , "--file", env_txt.name)
+                run_command(Commands.CREATE, prefix2, "--file", env_txt.name)
 
                 assert package_is_installed(prefix2, "python")
 
@@ -37,12 +43,12 @@ def test_basic(self):
     @pytest.mark.skipif(True, reason="Bring back `conda list --export` #3445")
     def test_multi_channel_export(self):
         """
-        When try to import from txt
-        every package should come from same channel
+        When trying to import from a txt file,
+        every package should come from the same channel.
         """
         with make_temp_env("python=3.5") as prefix:
             assert exists(join(prefix, PYTHON_BINARY))
-            assert package_is_installed(prefix, 'python=3')
+            assert package_is_installed(prefix, "python=3")
 
             run_command(Commands.INSTALL, prefix, "six", "-c", "conda-forge")
             assert package_is_installed(prefix, "six")
@@ -51,11 +57,13 @@ def test_multi_channel_export(self):
             self.assertIn("conda-forge", output)
 
             try:
-                with Utf8NamedTemporaryFile(mode="w", suffix="txt", delete=False) as env_txt:
+                with Utf8NamedTemporaryFile(
+                    mode="w", suffix="txt", delete=False
+                ) as env_txt:
                     env_txt.write(output)
                     env_txt.close()
                     prefix2 = make_temp_prefix()
-                    run_command(Commands.CREATE, prefix2 , "--file", env_txt.name)
+                    run_command(Commands.CREATE, prefix2, "--file", env_txt.name)
                     assert package_is_installed(prefix2, "python")
 
                     output2, error, _ = run_command(Commands.LIST, prefix2, "-e")
@@ -65,12 +73,12 @@ def test_multi_channel_export(self):
 
     def test_multi_channel_explicit(self):
         """
-        When try to import from txt
-        every package should come from same channel
+        When trying to import from a txt file,
+        every package should come from the same channel.
         """
         with make_temp_env("python=3.5") as prefix:
             assert exists(join(prefix, PYTHON_BINARY))
-            assert package_is_installed(prefix, 'python=3')
+            assert package_is_installed(prefix, "python=3")
 
             run_command(Commands.INSTALL, prefix, "six", "-c", "conda-forge")
             assert 
package_is_installed(prefix, "conda-forge::six") @@ -82,7 +90,9 @@ def test_multi_channel_explicit(self): urls1 = {url for url in output.split() if url.startswith("http")} try: - with Utf8NamedTemporaryFile(mode="w", suffix="txt", delete=False) as env_txt: + with Utf8NamedTemporaryFile( + mode="w", suffix="txt", delete=False + ) as env_txt: env_txt.write(output) env_txt.close() prefix2 = make_temp_prefix() diff --git a/tests/test_exports.py b/tests/test_exports.py index de2f514fa5b..957c9cd16c1 100644 --- a/tests/test_exports.py +++ b/tests/test_exports.py @@ -1,23 +1,28 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - from conda.common.compat import on_win def test_exports(): import conda.exports + assert conda.exports.PaddingError def test_conda_subprocess(): import os - from subprocess import Popen, PIPE + from subprocess import PIPE, Popen + import conda try: - p = Popen(['echo', '"%s"' % conda.__version__], env=os.environ, stdout=PIPE, stderr=PIPE, - shell=on_win) + p = Popen( + ["echo", '"%s"' % conda.__version__], + env=os.environ, + stdout=PIPE, + stderr=PIPE, + shell=on_win, + ) except TypeError: for k, v in os.environ.items(): if type(k) != str or type(v) != str: diff --git a/tests/test_fetch.py b/tests/test_fetch.py index bd279ccf3e3..133778d970a 100644 --- a/tests/test_fetch.py +++ b/tests/test_fetch.py @@ -1,29 +1,39 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +from os.path import exists, isfile +from tempfile import mktemp from unittest import TestCase import pytest import responses -from os.path import exists, isfile -from tempfile import mktemp from conda.base.constants import DEFAULT_CHANNEL_ALIAS from conda.base.context import conda_tests_ctxt_mgmt_def_pol from conda.common.io import env_var +from conda.core.package_cache_data import download +from conda.core.subdir_data import fetch_repodata_remote_request from conda.exceptions import CondaHTTPError from conda.gateways.connection.download import TmpDownload -from conda.core.subdir_data import fetch_repodata_remote_request -from conda.core.package_cache_data import download @pytest.mark.integration class TestConnectionWithShortTimeouts(TestCase): - def test_download_connectionerror(self): - with env_var('CONDA_REMOTE_CONNECT_TIMEOUT_SECS', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_REMOTE_READ_TIMEOUT_SECS', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_REMOTE_MAX_RETRIES', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_REMOTE_CONNECT_TIMEOUT_SECS", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with env_var( + "CONDA_REMOTE_READ_TIMEOUT_SECS", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with env_var( + "CONDA_REMOTE_MAX_RETRIES", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with pytest.raises(CondaHTTPError) as execinfo: url = "http://240.0.0.0/" msg = "Connection error:" @@ -31,10 +41,23 @@ def test_download_connectionerror(self): assert msg in str(execinfo) def test_fetchrepodate_connectionerror(self): - with env_var('CONDA_REMOTE_CONNECT_TIMEOUT_SECS', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_REMOTE_READ_TIMEOUT_SECS', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_REMOTE_MAX_RETRIES', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_REMOTE_CONNECT_TIMEOUT_SECS", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with env_var( + 
"CONDA_REMOTE_READ_TIMEOUT_SECS", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with env_var( + "CONDA_REMOTE_MAX_RETRIES", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): from conda.base.context import context + assert context.remote_connect_timeout_secs == 1 assert context.remote_read_timeout_secs == 1 assert context.remote_max_retries == 1 @@ -45,9 +68,21 @@ def test_fetchrepodate_connectionerror(self): assert msg in str(execinfo) def test_tmpDownload(self): - with env_var('CONDA_REMOTE_CONNECT_TIMEOUT_SECS', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_REMOTE_READ_TIMEOUT_SECS', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): - with env_var('CONDA_REMOTE_MAX_RETRIES', 1, stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_REMOTE_CONNECT_TIMEOUT_SECS", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with env_var( + "CONDA_REMOTE_READ_TIMEOUT_SECS", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): + with env_var( + "CONDA_REMOTE_MAX_RETRIES", + 1, + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): url = "https://repo.anaconda.com/pkgs/free/osx-64/appscript-1.0.1-py27_0.tar.bz2" with TmpDownload(url) as dst: assert exists(dst) @@ -75,13 +110,17 @@ class TestFetchRepoData(TestCase): class TestDownload(TestCase): - @responses.activate def test_download_httperror(self): with pytest.raises(CondaHTTPError) as execinfo: url = DEFAULT_CHANNEL_ALIAS msg = "HTTPError:" - responses.add(responses.GET, url, body='{"error": "not found"}', status=404, - content_type='application/json') + responses.add( + responses.GET, + url, + body='{"error": "not found"}', + status=404, + content_type="application/json", + ) download(url, mktemp()) assert msg in str(execinfo) diff --git a/tests/test_file_permissions.py b/tests/test_file_permissions.py index 313e66f7d6c..156594bf07c 100644 --- a/tests/test_file_permissions.py +++ b/tests/test_file_permissions.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - # import unittest # from os.path import dirname, join # from conda import file_permissions diff --git a/tests/test_history.py b/tests/test_history.py index b064abc21e4..13131c4a804 100644 --- a/tests/test_history.py +++ b/tests/test_history.py @@ -1,28 +1,26 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import unittest from os.path import dirname from pprint import pprint -import unittest from unittest import mock +from conda.history import History from conda.testing.cases import BaseTestCase from conda.testing.integration import make_temp_prefix -from conda.history import History - class HistoryTestCase(BaseTestCase): def test_works_as_context_manager(self): h = History("/path/to/prefix") - self.assertTrue(getattr(h, '__enter__')) - self.assertTrue(getattr(h, '__exit__')) + self.assertTrue(getattr(h, "__enter__")) + self.assertTrue(getattr(h, "__exit__")) def test_calls_update_on_exit(self): h = History("/path/to/prefix") - with mock.patch.object(h, 'init_log_file') as init_log_file: + with mock.patch.object(h, "init_log_file") as init_log_file: init_log_file.return_value = None - with mock.patch.object(h, 'update') as update: + with mock.patch.object(h, "update") as update: with h: self.assertEqual(0, update.call_count) pass @@ -30,14 +28,14 @@ def test_calls_update_on_exit(self): def test_returns_history_object_as_context_object(self): h = History("/path/to/prefix") - with mock.patch.object(h, 'init_log_file') as init_log_file: + with 
mock.patch.object(h, "init_log_file") as init_log_file: init_log_file.return_value = None - with mock.patch.object(h, 'update'): + with mock.patch.object(h, "update"): with h as h2: self.assertEqual(h, h2) def test_empty_history_check_on_empty_env(self): - with mock.patch.object(History, 'file_is_empty') as mock_file_is_empty: + with mock.patch.object(History, "file_is_empty") as mock_file_is_empty: with History(make_temp_prefix()) as h: self.assertEqual(mock_file_is_empty.call_count, 0) self.assertEqual(mock_file_is_empty.call_count, 0) @@ -46,7 +44,7 @@ def test_empty_history_check_on_empty_env(self): assert not h.file_is_empty() def test_parse_on_empty_env(self): - with mock.patch.object(History, 'parse') as mock_parse: + with mock.patch.object(History, "parse") as mock_parse: with History(make_temp_prefix(name=str(self.tmpdir))) as h: self.assertEqual(mock_parse.call_count, 0) self.assertEqual(len(h.parse()), 0) @@ -54,7 +52,6 @@ def test_parse_on_empty_env(self): class UserRequestsTestCase(unittest.TestCase): - h = History(dirname(__file__)) user_requests = h.get_user_requests() @@ -62,32 +59,46 @@ def test_len(self): self.assertEqual(len(self.user_requests), 6) def test_0(self): - self.assertEqual(self.user_requests[0], - {'cmd': ['conda', 'update', 'conda'], - 'date': '2016-02-16 13:31:33', - 'unlink_dists': (), - 'link_dists': (), - }) + self.assertEqual( + self.user_requests[0], + { + "cmd": ["conda", "update", "conda"], + "date": "2016-02-16 13:31:33", + "unlink_dists": (), + "link_dists": (), + }, + ) def test_last(self): - self.assertEqual(self.user_requests[-1], - {'action': 'install', - 'cmd': ['conda', 'install', 'pyflakes'], - 'date': '2016-02-18 22:53:20', - 'specs': ['pyflakes', 'conda', 'python 2.7*'], - 'update_specs': ['pyflakes', 'conda', 'python 2.7*'], - 'unlink_dists': (), - 'link_dists': ['+pyflakes-1.0.0-py27_0'], - }) + self.assertEqual( + self.user_requests[-1], + { + "action": "install", + "cmd": ["conda", "install", "pyflakes"], + "date": "2016-02-18 22:53:20", + "specs": ["pyflakes", "conda", "python 2.7*"], + "update_specs": ["pyflakes", "conda", "python 2.7*"], + "unlink_dists": (), + "link_dists": ["+pyflakes-1.0.0-py27_0"], + }, + ) def test_conda_comment_version_parsing(self): - assert History._parse_comment_line("# conda version: 4.5.1") == {"conda_version": "4.5.1"} - assert History._parse_comment_line("# conda version: 4.5.1rc1") == {"conda_version": "4.5.1rc1"} - assert History._parse_comment_line("# conda version: 4.5.1dev0") == {"conda_version": "4.5.1dev0"} + assert History._parse_comment_line("# conda version: 4.5.1") == { + "conda_version": "4.5.1" + } + assert History._parse_comment_line("# conda version: 4.5.1rc1") == { + "conda_version": "4.5.1rc1" + } + assert History._parse_comment_line("# conda version: 4.5.1dev0") == { + "conda_version": "4.5.1dev0" + } def test_specs_line_parsing_44(self): # New format (>=4.4) - item = History._parse_comment_line("# update specs: [\"param[version='>=1.5.1,<2.0']\"]") + item = History._parse_comment_line( + "# update specs: [\"param[version='>=1.5.1,<2.0']\"]" + ) pprint(item) assert item == { "action": "update", @@ -101,79 +112,87 @@ def test_specs_line_parsing_44(self): def test_specs_line_parsing_43(self): # Old format (<4.4) - item = History._parse_comment_line('# install specs: param >=1.5.1,<2.0') + item = History._parse_comment_line("# install specs: param >=1.5.1,<2.0") pprint(item) assert item == { - 'action': 'install', - 'specs': [ - 'param >=1.5.1,<2.0', + "action": "install", + "specs": [ + 
"param >=1.5.1,<2.0", ], - 'update_specs': [ - 'param >=1.5.1,<2.0', + "update_specs": [ + "param >=1.5.1,<2.0", ], } - item = History._parse_comment_line('# install specs: param >=1.5.1,<2.0,0packagename >=1.0.0,<2.0') + item = History._parse_comment_line( + "# install specs: param >=1.5.1,<2.0,0packagename >=1.0.0,<2.0" + ) pprint(item) assert item == { - 'action': 'install', - 'specs': [ - 'param >=1.5.1,<2.0', - '0packagename >=1.0.0,<2.0', + "action": "install", + "specs": [ + "param >=1.5.1,<2.0", + "0packagename >=1.0.0,<2.0", ], - 'update_specs': [ - 'param >=1.5.1,<2.0', - '0packagename >=1.0.0,<2.0', + "update_specs": [ + "param >=1.5.1,<2.0", + "0packagename >=1.0.0,<2.0", ], } - item = History._parse_comment_line('# install specs: python>=3.5.1,jupyter >=1.0.0,<2.0,matplotlib >=1.5.1,<2.0,numpy >=1.11.0,<2.0,pandas >=0.19.2,<1.0,psycopg2 >=2.6.1,<3.0,pyyaml >=3.12,<4.0,scipy >=0.17.0,<1.0') + item = History._parse_comment_line( + "# install specs: python>=3.5.1,jupyter >=1.0.0,<2.0,matplotlib >=1.5.1,<2.0,numpy >=1.11.0,<2.0,pandas >=0.19.2,<1.0,psycopg2 >=2.6.1,<3.0,pyyaml >=3.12,<4.0,scipy >=0.17.0,<1.0" + ) pprint(item) assert item == { - 'action': 'install', - 'specs': [ - 'python>=3.5.1', - 'jupyter >=1.0.0,<2.0', - 'matplotlib >=1.5.1,<2.0', - 'numpy >=1.11.0,<2.0', - 'pandas >=0.19.2,<1.0', - 'psycopg2 >=2.6.1,<3.0', - 'pyyaml >=3.12,<4.0', - 'scipy >=0.17.0,<1.0', + "action": "install", + "specs": [ + "python>=3.5.1", + "jupyter >=1.0.0,<2.0", + "matplotlib >=1.5.1,<2.0", + "numpy >=1.11.0,<2.0", + "pandas >=0.19.2,<1.0", + "psycopg2 >=2.6.1,<3.0", + "pyyaml >=3.12,<4.0", + "scipy >=0.17.0,<1.0", ], - 'update_specs': [ - 'python>=3.5.1', - 'jupyter >=1.0.0,<2.0', - 'matplotlib >=1.5.1,<2.0', - 'numpy >=1.11.0,<2.0', - 'pandas >=0.19.2,<1.0', - 'psycopg2 >=2.6.1,<3.0', - 'pyyaml >=3.12,<4.0', - 'scipy >=0.17.0,<1.0', + "update_specs": [ + "python>=3.5.1", + "jupyter >=1.0.0,<2.0", + "matplotlib >=1.5.1,<2.0", + "numpy >=1.11.0,<2.0", + "pandas >=0.19.2,<1.0", + "psycopg2 >=2.6.1,<3.0", + "pyyaml >=3.12,<4.0", + "scipy >=0.17.0,<1.0", ], } - item = History._parse_comment_line('# install specs: _license >=1.0.0,<2.0') + item = History._parse_comment_line("# install specs: _license >=1.0.0,<2.0") pprint(item) assert item == { - 'action': 'install', - 'specs': [ - '_license >=1.0.0,<2.0', + "action": "install", + "specs": [ + "_license >=1.0.0,<2.0", ], - 'update_specs': [ - '_license >=1.0.0,<2.0', + "update_specs": [ + "_license >=1.0.0,<2.0", ], } - item = History._parse_comment_line('# install specs: pandas,_license >=1.0.0,<2.0') + item = History._parse_comment_line( + "# install specs: pandas,_license >=1.0.0,<2.0" + ) pprint(item) assert item == { - 'action': 'install', - 'specs': [ - 'pandas', '_license >=1.0.0,<2.0', + "action": "install", + "specs": [ + "pandas", + "_license >=1.0.0,<2.0", ], - 'update_specs': [ - 'pandas', '_license >=1.0.0,<2.0', + "update_specs": [ + "pandas", + "_license >=1.0.0,<2.0", ], } diff --git a/tests/test_import.py b/tests/test_import.py index 66e9fbd4b76..73d03323fff 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - """ Test if we can import everything from conda. This basically tests syntax correctness and whether the internal imports work. Created to test py3k compatibility. 
@@ -9,6 +8,7 @@ import os import unittest + import conda from conda.utils import on_win @@ -16,11 +16,10 @@ class TestImportAllConda(unittest.TestCase): - def _test_import(self, subpackage): # Prepare prefix = PREFIX - module_prefix = 'conda' + module_prefix = "conda" if subpackage: prefix = os.path.join(prefix, subpackage) module_prefix = f"{module_prefix}.{subpackage}" @@ -31,29 +30,28 @@ def _test_import(self, subpackage): # Import each module in given (sub)package for fname in os.listdir(prefix): # Discard files that are not of interest - if fname.startswith('__'): + if fname.startswith("__"): continue - elif not fname.endswith('.py'): + elif not fname.endswith(".py"): continue - elif fname.startswith('windows') and not on_win: + elif fname.startswith("windows") and not on_win: continue - elif fname == 'distro.py': + elif fname == "distro.py": continue # Import - modname = module_prefix + '.' + fname.split('.')[0] - print('importing', modname) + modname = module_prefix + "." + fname.split(".")[0] + print("importing", modname) __import__(modname) - def test_import_root(self): - self._test_import('') + self._test_import("") def test_import_cli(self): - self._test_import('cli') + self._test_import("cli") def test_import_progressbar(self): - self._test_import('_vendor') + self._test_import("_vendor") -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_info.py b/tests/test_info.py index 1d15f733d48..0ea3f0a28ba 100644 --- a/tests/test_info.py +++ b/tests/test_info.py @@ -1,7 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - import json import sys from unittest.mock import patch @@ -28,9 +26,9 @@ def test_info(): ): assert_in(name, conda_info_out) - conda_info_e_out, conda_info_e_err, rc = run_command(Commands.INFO, '-e') - assert_in('base', conda_info_e_out) - assert_equals(conda_info_e_err, '') + conda_info_e_out, conda_info_e_err, rc = run_command(Commands.INFO, "-e") + assert_in("base", conda_info_e_out) + assert_equals(conda_info_e_err, "") conda_info_s_out, conda_info_s_err, rc = run_command(Commands.INFO, "-s") assert_equals(conda_info_s_err, "") @@ -44,8 +42,8 @@ def test_info(): ): assert_in(name, conda_info_s_out) - conda_info_all_out, conda_info_all_err, rc = run_command(Commands.INFO, '--all') - assert_equals(conda_info_all_err, '') + conda_info_all_out, conda_info_all_err, rc = run_command(Commands.INFO, "--all") + assert_equals(conda_info_all_err, "") assert_in(conda_info_out, conda_info_all_out) assert_in(conda_info_e_out, conda_info_all_out) assert_in(conda_info_s_out, conda_info_all_out) @@ -59,7 +57,9 @@ def test_info(): def test_info_package_json(): # This is testing deprecated behaviour. The CLI already says: # WARNING: 'conda info package_name' is deprecated. Use 'conda search package_name --info'. 
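# A minimal sketch (an assumption about behavior, not conda's API docs) of
# the env_var helper used throughout the hunks below: it temporarily sets an
# environment variable for the duration of the with-block. The tests also
# pass stack_callback=conda_tests_ctxt_mgmt_def_pol so that conda's context
# object is refreshed while the override is active.
import os
from conda.common.io import env_var

assert "CONDA_FOO_EXAMPLE" not in os.environ  # hypothetical variable name
with env_var("CONDA_FOO_EXAMPLE", "42"):
    assert os.environ["CONDA_FOO_EXAMPLE"] == "42"
# assumption: on exit env_var restores the previous state, deleting
# variables it introduced
assert "CONDA_FOO_EXAMPLE" not in os.environ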
- with env_var("CONDA_CHANNELS", "defaults", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CHANNELS", "defaults", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): out, err, rc = run_command(Commands.INFO, "--json", "itsdangerous=1.0.0=py37_0") out = json.loads(out) @@ -67,7 +67,9 @@ def test_info_package_json(): assert len(out["itsdangerous=1.0.0=py37_0"]) == 1 assert isinstance(out["itsdangerous=1.0.0=py37_0"], list) - with env_var("CONDA_CHANNELS", "defaults", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_CHANNELS", "defaults", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): out, err, rc = run_command(Commands.INFO, "--json", "itsdangerous") out = json.loads(out) @@ -77,23 +79,38 @@ def test_info_package_json(): @pytest.mark.skipif(True, reason="only temporary") -@patch('conda.cli.conda_argparse.do_call', side_effect=KeyError('blarg')) +@patch("conda.cli.conda_argparse.do_call", side_effect=KeyError("blarg")) def test_get_info_dict(cli_install_mock): # This test patches conda.cli.install.install to throw an artificial exception. # What we're looking for here is the proper behavior for how error reports work with # collecting `conda info` in this situation. - with env_var('CONDA_REPORT_ERRORS', 'false', stack_callback=conda_tests_ctxt_mgmt_def_pol): - out, err, _ = run_command(Commands.CREATE, "-n", "blargblargblarg", "blarg", "--dry-run", "--json", - use_exception_handler=True) + with env_var( + "CONDA_REPORT_ERRORS", "false", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + out, err, _ = run_command( + Commands.CREATE, + "-n", + "blargblargblarg", + "blarg", + "--dry-run", + "--json", + use_exception_handler=True, + ) assert cli_install_mock.call_count == 1 sys.stdout.write(out) sys.stderr.write(err) assert not err json_obj = json.loads(out) - assert json_obj['conda_info']['conda_version'] - - out, err, _ = run_command(Commands.CREATE, "-n", "blargblargblarg", "blarg", "--dry-run", - use_exception_handler=True) + assert json_obj["conda_info"]["conda_version"] + + out, err, _ = run_command( + Commands.CREATE, + "-n", + "blargblargblarg", + "blarg", + "--dry-run", + use_exception_handler=True, + ) sys.stderr.write(out) sys.stderr.write(err) assert "conda info could not be constructed" not in err diff --git a/tests/test_install.py b/tests/test_install.py index ce5b295e263..c945de76d2f 100644 --- a/tests/test_install.py +++ b/tests/test_install.py @@ -1,6 +1,16 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +import random +import shutil +import subprocess +import sys +import tempfile +import unittest +from os import chdir, getcwd, makedirs +from os.path import exists, join, relpath +from unittest import mock +import pytest from conda.base.context import context from conda.common.compat import on_win @@ -9,27 +19,18 @@ from conda.gateways.disk.delete import move_path_to_trash from conda.gateways.disk.read import read_no_link, yield_lines from conda.models.enums import FileMode -from os import chdir, getcwd, makedirs -from os.path import exists, join, relpath -import pytest -import random -import shutil -import subprocess -import sys -import tempfile -import unittest -from unittest import mock patch = mock.patch if mock else None def generate_random_path(): - return '/some/path/to/file%s' % random.randint(100, 200) + return "/some/path/to/file%s" % random.randint(100, 200) class TestBinaryReplace(unittest.TestCase): - - @pytest.mark.xfail(on_win, reason="binary replacement on windows skipped", 
strict=True) + @pytest.mark.xfail( + on_win, reason="binary replacement on windows skipped", strict=True + ) def test_simple(self): for encoding in ("utf-8", "utf-16-le", "utf-16-be", "utf-32-le", "utf-32-be"): a = "aaaaa".encode(encoding) @@ -38,38 +39,57 @@ def test_simple(self): result = "xxxbbbbxyz\0\0zz".encode(encoding) self.assertEqual(binary_replace(data, a, b, encoding=encoding), result) - @pytest.mark.xfail(on_win, reason="binary replacement on windows skipped", strict=True) + @pytest.mark.xfail( + on_win, reason="binary replacement on windows skipped", strict=True + ) def test_shorter(self): self.assertEqual( - binary_replace(b"xxxaaaaaxyz\x00zz", b"aaaaa", b"bbbb"), b"xxxbbbbxyz\x00\x00zz" + binary_replace(b"xxxaaaaaxyz\x00zz", b"aaaaa", b"bbbb"), + b"xxxbbbbxyz\x00\x00zz", ) - @pytest.mark.xfail(on_win, reason="binary replacement on windows skipped", strict=True) + @pytest.mark.xfail( + on_win, reason="binary replacement on windows skipped", strict=True + ) def test_too_long(self): self.assertRaises( _PaddingError, binary_replace, b"xxxaaaaaxyz\x00zz", b"aaaaa", b"bbbbbbbb" ) - @pytest.mark.xfail(on_win, reason="binary replacement on windows skipped", strict=True) + @pytest.mark.xfail( + on_win, reason="binary replacement on windows skipped", strict=True + ) def test_no_extra(self): self.assertEqual(binary_replace(b"aaaaa\x00", b"aaaaa", b"bbbbb"), b"bbbbb\x00") - @pytest.mark.xfail(on_win, reason="binary replacement on windows skipped", strict=True) + @pytest.mark.xfail( + on_win, reason="binary replacement on windows skipped", strict=True + ) def test_two(self): self.assertEqual( binary_replace(b"aaaaa\x001234aaaaacc\x00\x00", b"aaaaa", b"bbbbb"), b"bbbbb\x001234bbbbbcc\x00\x00", ) - @pytest.mark.xfail(on_win, reason="binary replacement on windows skipped", strict=True) + @pytest.mark.xfail( + on_win, reason="binary replacement on windows skipped", strict=True + ) def test_spaces(self): self.assertEqual(binary_replace(b" aaaa \x00", b"aaaa", b"bbbb"), b" bbbb \x00") - @pytest.mark.xfail(on_win, reason="binary replacement on windows skipped", strict=True) + @pytest.mark.xfail( + on_win, reason="binary replacement on windows skipped", strict=True + ) def test_multiple(self): - self.assertEqual(binary_replace(b"aaaacaaaa\x00", b"aaaa", b"bbbb"), b"bbbbcbbbb\x00") - self.assertEqual(binary_replace(b"aaaacaaaa\x00", b"aaaa", b"bbb"), b"bbbcbbb\x00\x00\x00") - self.assertRaises(_PaddingError, binary_replace, b"aaaacaaaa\x00", b"aaaa", b"bbbbb") + self.assertEqual( + binary_replace(b"aaaacaaaa\x00", b"aaaa", b"bbbb"), b"bbbbcbbbb\x00" + ) + self.assertEqual( + binary_replace(b"aaaacaaaa\x00", b"aaaa", b"bbb"), b"bbbcbbb\x00\x00\x00" + ) + self.assertRaises( + _PaddingError, binary_replace, b"aaaacaaaa\x00", b"aaaa", b"bbbbb" + ) @pytest.mark.integration @pytest.mark.skipif(not on_win, reason="exe entry points only necessary on win") @@ -83,40 +103,52 @@ def test_windows_entry_point(self): chdir(tmp_dir) original_prefix = "C:\\BogusPrefix\\python.exe" try: - url = 'https://s3.amazonaws.com/conda-dev/pyzzerw.pyz' - download(url, 'pyzzerw.pyz') - url = 'https://files.pythonhosted.org/packages/source/c/conda/conda-4.1.6.tar.gz' - download(url, 'conda-4.1.6.tar.gz') - subprocess.check_call([sys.executable, 'pyzzerw.pyz', - # output file - '-o', 'conda.exe', - # entry point - '-m', 'conda.cli.main:main', - # initial shebang - '-s', '#! 
' + original_prefix, - # launcher executable to use (32-bit text should be compatible) - '-l', 't32', - # source archive to turn into executable - 'conda-4.1.6.tar.gz', - ], - cwd=tmp_dir) + url = "https://s3.amazonaws.com/conda-dev/pyzzerw.pyz" + download(url, "pyzzerw.pyz") + url = "https://files.pythonhosted.org/packages/source/c/conda/conda-4.1.6.tar.gz" + download(url, "conda-4.1.6.tar.gz") + subprocess.check_call( + [ + sys.executable, + "pyzzerw.pyz", + # output file + "-o", + "conda.exe", + # entry point + "-m", + "conda.cli.main:main", + # initial shebang + "-s", + "#! " + original_prefix, + # launcher executable to use (32-bit text should be compatible) + "-l", + "t32", + # source archive to turn into executable + "conda-4.1.6.tar.gz", + ], + cwd=tmp_dir, + ) # this is the actual test: change the embedded prefix and make sure that the exe runs. - data = open('conda.exe', 'rb').read() + data = open("conda.exe", "rb").read() fixed_data = binary_replace(data, original_prefix, sys.executable) - with open("conda.fixed.exe", 'wb') as f: + with open("conda.fixed.exe", "wb") as f: f.write(fixed_data) # without a valid shebang in the exe, this should fail with pytest.raises(subprocess.CalledProcessError): - subprocess.check_call(['conda.exe', '-h']) + subprocess.check_call(["conda.exe", "-h"]) - process = subprocess.Popen(['conda.fixed.exe', '-h'], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + process = subprocess.Popen( + ["conda.fixed.exe", "-h"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) output, error = process.communicate() - output = output.decode('utf-8') - error = error.decode('utf-8') - assert ("conda is a tool for managing and deploying applications, " - "environments and packages.") in output + output = output.decode("utf-8") + error = error.decode("utf-8") + assert ( + "conda is a tool for managing and deploying applications, " + "environments and packages." 
+ ) in output except: raise finally: @@ -124,46 +156,47 @@ def test_windows_entry_point(self): class FileTests(unittest.TestCase): - def setUp(self): self.tmpdir = tempfile.mkdtemp() - self.tmpfname = join(self.tmpdir, 'testfile') + self.tmpfname = join(self.tmpdir, "testfile") def tearDown(self): shutil.rmtree(self.tmpdir) def test_default_text(self): - with open(self.tmpfname, 'w') as fo: - fo.write('#!/opt/anaconda1anaconda2anaconda3/bin/python\n' - 'echo "Hello"\n') - update_prefix(self.tmpfname, '/usr/local') + with open(self.tmpfname, "w") as fo: + fo.write("#!/opt/anaconda1anaconda2anaconda3/bin/python\n" 'echo "Hello"\n') + update_prefix(self.tmpfname, "/usr/local") with open(self.tmpfname) as fi: data = fi.read() - self.assertEqual(data, '#!/usr/local/bin/python\n' - 'echo "Hello"\n') + self.assertEqual(data, "#!/usr/local/bin/python\n" 'echo "Hello"\n') @pytest.mark.skipif(on_win, reason="test is invalid on windows") def test_long_default_text(self): with open(self.tmpfname, "w") as fo: - fo.write("#!/opt/anaconda1anaconda2anaconda3/bin/python -O\n" 'echo "Hello"\n') + fo.write( + "#!/opt/anaconda1anaconda2anaconda3/bin/python -O\n" 'echo "Hello"\n' + ) new_prefix = "/usr/local/{}".format("1234567890" * 52) update_prefix(self.tmpfname, new_prefix) with open(self.tmpfname) as fi: data = fi.read() - self.assertEqual(data, '#!/usr/bin/env python -O\n' - 'echo "Hello"\n') + self.assertEqual(data, "#!/usr/bin/env python -O\n" 'echo "Hello"\n') @pytest.mark.skipif(on_win, reason="no binary replacement done on win") def test_binary(self): - with open(self.tmpfname, 'wb') as fo: - fo.write(b'\x7fELF.../some-placeholder/lib/libfoo.so\0') - update_prefix(self.tmpfname, '/usr/local', - placeholder='/some-placeholder', mode=FileMode.binary) - with open(self.tmpfname, 'rb') as fi: + with open(self.tmpfname, "wb") as fo: + fo.write(b"\x7fELF.../some-placeholder/lib/libfoo.so\0") + update_prefix( + self.tmpfname, + "/usr/local", + placeholder="/some-placeholder", + mode=FileMode.binary, + ) + with open(self.tmpfname, "rb") as fi: data = fi.read() self.assertEqual( - data, - b'\x7fELF.../usr/local/lib/libfoo.so\0\0\0\0\0\0\0\0' + data, b"\x7fELF.../usr/local/lib/libfoo.so\0\0\0\0\0\0\0\0" ) def test_trash_outside_prefix(self): @@ -501,27 +534,28 @@ def test_trash_outside_prefix(self): def _make_lines_file(path): - with open(path, 'w') as fh: + with open(path, "w") as fh: fh.write("line 1\n") fh.write("line 2\n") fh.write("# line 3\n") fh.write("line 4\n") + def test_yield_lines(tmpdir): tempfile = join(str(tmpdir), "testfile") _make_lines_file(tempfile) lines = list(yield_lines(tempfile)) - assert lines == ['line 1', 'line 2', 'line 4'] + assert lines == ["line 1", "line 2", "line 4"] def test_read_no_link(tmpdir): tempdir = str(tmpdir) - no_link = join(tempdir, 'no_link') - no_softlink = join(tempdir, 'no_softlink') + no_link = join(tempdir, "no_link") + no_softlink = join(tempdir, "no_softlink") _make_lines_file(no_link) s1 = read_no_link(tempdir) - assert s1 == {'line 1', 'line 2', 'line 4'} + assert s1 == {"line 1", "line 2", "line 4"} _make_lines_file(no_softlink) s2 = read_no_link(tempdir) - assert s2 == {'line 1', 'line 2', 'line 4'} + assert s2 == {"line 1", "line 2", "line 4"} diff --git a/tests/test_instructions.py b/tests/test_instructions.py index e82e8364115..3b5d3bfafef 100644 --- a/tests/test_instructions.py +++ b/tests/test_instructions.py @@ -1,14 +1,13 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - -import unittest -from logging import Handler, 
DEBUG import os +import unittest +from logging import DEBUG, Handler from conda import instructions +from conda.exceptions import CondaFileIOError from conda.exports import execute_instructions from conda.instructions import commands -from conda.exceptions import CondaFileIOError def test_expected_operation_order(): @@ -39,7 +38,6 @@ def handle(self, record): class TestExecutePlan(unittest.TestCase): def test_simple_instruction(self): - index = {"This is an index": True} def simple_cmd(state, arg): @@ -56,7 +54,6 @@ def simple_cmd(state, arg): self.assertTrue(simple_cmd.call_args, ("arg1",)) def test_state(self): - index = {"This is an index": True} def simple_cmd(state, arg): diff --git a/tests/test_link_order.py b/tests/test_link_order.py index 4ff007cde6a..37c8c28fd81 100644 --- a/tests/test_link_order.py +++ b/tests/test_link_order.py @@ -1,17 +1,17 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - - -from logging import getLogger -import unittest -import pytest import shutil import tempfile +import unittest +from logging import getLogger -from conda.testing.integration import run_command, Commands +import pytest + +from conda.testing.integration import Commands, run_command log = getLogger(__name__) + class TestLinkOrder(unittest.TestCase): def setUp(self): self.prefix = tempfile.mkdtemp() @@ -21,10 +21,14 @@ def tearDown(self): @pytest.mark.integration def test_link_order_post_link_actions(self): - stdout, stderr, _ = run_command(Commands.CREATE, self.prefix, "c_post_link_package", "-c", "conda-test") - assert(stderr == '') + stdout, stderr, _ = run_command( + Commands.CREATE, self.prefix, "c_post_link_package", "-c", "conda-test" + ) + assert stderr == "" @pytest.mark.integration def test_link_order_post_link_depend(self): - stdout, stderr, _ = run_command(Commands.CREATE, self.prefix, "e_post_link_package", "-c", "conda-test") - assert(stderr == '') + stdout, stderr, _ = run_command( + Commands.CREATE, self.prefix, "e_post_link_package", "-c", "conda-test" + ) + assert stderr == "" diff --git a/tests/test_lock.py b/tests/test_lock.py index 9aceb390649..2d68c72f89f 100644 --- a/tests/test_lock.py +++ b/tests/test_lock.py @@ -1,14 +1,15 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause +from os.path import basename, exists, isfile, join import pytest + from conda.lock import DirectoryLock, FileLock, LockError -from os.path import basename, exists, isfile, join def test_filelock_passes(tmpdir): """ - Normal test of the file lock + Normal test of the file lock """ package_name = "conda_file1" tmpfile = join(tmpdir.strpath, package_name) @@ -22,8 +23,8 @@ def test_filelock_locks(tmpdir): """ - Test the file lock with multiple locks on the same file - A LockError should be raised + Test the file lock with multiple locks on the same file + A LockError should be raised """ package_name = "conda_file_2" tmpfile = join(tmpdir.strpath, package_name) @@ -44,12 +45,11 @@ def test_folder_locks(tmpdir): """ - Test the directory lock + Test the directory lock """ package_name = "dir_1" tmpfile = join(tmpdir.strpath, package_name) with DirectoryLock(tmpfile) as lock1: - assert exists(lock1.lock_file_path) and isfile(lock1.lock_file_path) with pytest.raises(LockError): @@ -64,9 +64,10 @@ def test_lock_thread(tmpdir): """ - Two threads want to lock a file - One thread will have a LockError raised + Two threads want to lock a file + One thread will have a LockError raised """ + def lock_thread(tmpdir, file_path): with FileLock(file_path) as lock1: path = basename(lock1.lock_file_path) @@ -74,6 +75,7 @@ def lock_thread(tmpdir, file_path): assert not tmpdir.join(path).exists() from threading import Thread + package_name = "conda_file_3" tmpfile = join(tmpdir.strpath, package_name) t = Thread(target=lock_thread, args=(tmpdir, tmpfile)) @@ -90,10 +92,11 @@ def lock_thread(tmpdir, file_path): def test_lock_retries(tmpdir): """ - Two threads want to lock the same file - The lock has zero retries - One thread will have a LockError raised + Two threads want to lock the same file + The lock has zero retries + One thread will have a LockError raised """ + def lock_thread_retries(tmpdir, file_path): with pytest.raises(LockError) as execinfo: with FileLock(file_path, retries=0): @@ -101,6 +104,7 @@ def lock_thread_retries(tmpdir, file_path): assert "LOCKERROR" in str(execinfo.value) from threading import Thread + package_name = "conda_file_3" tmpfile = join(tmpdir.strpath, package_name) t = Thread(target=lock_thread_retries, args=(tmpdir, tmpfile)) @@ -117,14 +121,14 @@ def lock_thread_retries(tmpdir, file_path): def test_permission_file(): """ - Test when the lock cannot be created due to permissions - Make sure no exception is raised + Test when the lock cannot be created due to permissions + Make sure no exception is raised """ from conda.auxlib.compat import Utf8NamedTemporaryFile - with Utf8NamedTemporaryFile(mode='r') as f: + + with Utf8NamedTemporaryFile(mode="r") as f: if not isinstance(f.name, str): return with FileLock(f.name) as lock: - path = basename(lock.lock_file_path) assert not exists(join(f.name, path)) diff --git a/tests/test_logic.py b/tests/test_logic.py index 9e708665461..62f35fca55f 100644 --- a/tests/test_logic.py +++ b/tests/test_logic.py @@ -1,14 +1,12 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from itertools import chain, combinations, permutations, product import pytest -from conda.common.logic import Clauses, FALSE, TRUE, minimal_unsatisfiable_subset +from conda.common.logic import FALSE, TRUE, Clauses, minimal_unsatisfiable_subset from conda.testing.helpers import raises - # These routines implement logical tests with short-circuiting # and propagation of unknown values: # - positive integers are variables @@ -27,7 +25,7 @@ def my_NOT(x): if isinstance(x, int): return -x if isinstance(x, str): - return x[1:] if x[0] == '!' else '!' + x + return x[1:] if x[0] == "!" else "!" + x return None @@ -35,20 +33,20 @@ def my_ABS(x): if isinstance(x, int): return abs(x) if isinstance(x, str): - return x[1:] if x[0] == '!' else x + return x[1:] if x[0] == "!" 
else x return None def my_OR(*args): - '''Implements a logical OR according to the logic: - - positive integers are variables - - negative integers are negations of positive variables - - TRUE and FALSE are fixed values - - None is an unknown value - TRUE OR x -> TRUE - FALSE OR x -> FALSE - None OR x -> None - x OR y -> None''' + """Implements a logical OR according to the logic: + - positive integers are variables + - negative integers are negations of positive variables + - TRUE and FALSE are fixed values + - None is an unknown value + TRUE OR x -> TRUE + FALSE OR x -> FALSE + None OR x -> None + x OR y -> None""" if any(v == TRUE for v in args): return TRUE args = {v for v in args if v != FALSE} @@ -97,6 +95,7 @@ def my_EVAL(eq, sol): # _evaluate_eq doesn't handle TRUE/FALSE entries return _evaluate_eq(eq, sol) + sum(c for c, a in eq if a == TRUE) + # Testing strategy: mechanically construct all possible permutations of # True, False, variables from 1 to m, and their negations, in order to exercise # all logical branches of the function. Test negative, positive, and full @@ -104,25 +103,23 @@ def my_EVAL(eq, sol): def my_TEST(Mfunc, Cfunc, mmin, mmax, is_iter): - for m in range(mmin, mmax+1): + for m in range(mmin, mmax + 1): if m == 0: ijprod = [()] else: - ijprod = (TRUE, FALSE) + sum(((k, my_NOT(k)) for k in range(1, m+1)), ()) + ijprod = (TRUE, FALSE) + sum(((k, my_NOT(k)) for k in range(1, m + 1)), ()) ijprod = product(ijprod, repeat=m) for ij in ijprod: C = Clauses() Cpos = Clauses() Cneg = Clauses() - for k in range(1, m+1): - nm = 'x%d' % k + for k in range(1, m + 1): + nm = "x%d" % k C.new_var(nm) Cpos.new_var(nm) Cneg.new_var(nm) ij2 = tuple( - C.from_index(k) - if isinstance(k, int) and k not in {TRUE, FALSE} - else k + C.from_index(k) if isinstance(k, int) and k not in {TRUE, FALSE} else k for k in ij ) if is_iter: @@ -136,21 +133,37 @@ def my_TEST(Mfunc, Cfunc, mmin, mmax, is_iter): tsol = Mfunc(*ij) if tsol in {TRUE, FALSE}: assert x == tsol, (ij2, Cfunc.__name__, C.as_list()) - assert Cpos.unsat == (tsol != TRUE) and not Cpos.as_list(), (ij, 'Require(%s)') - assert Cneg.unsat == (tsol == TRUE) and not Cneg.as_list(), (ij, 'Prevent(%s)') + assert Cpos.unsat == (tsol != TRUE) and not Cpos.as_list(), ( + ij, + "Require(%s)", + ) + assert Cneg.unsat == (tsol == TRUE) and not Cneg.as_list(), ( + ij, + "Prevent(%s)", + ) continue for sol in C.itersolve([(x,)]): qsol = Mfunc(*my_SOL(ij, sol)) assert qsol == TRUE, (ij2, sol, Cfunc.__name__, C.as_list()) for sol in Cpos.itersolve([]): qsol = Mfunc(*my_SOL(ij, sol)) - assert qsol == TRUE, (ij, sol, 'Require(%s)' % Cfunc.__name__, Cpos.as_list()) + assert qsol == TRUE, ( + ij, + sol, + "Require(%s)" % Cfunc.__name__, + Cpos.as_list(), + ) for sol in C.itersolve([(C.Not(x),)]): qsol = Mfunc(*my_SOL(ij, sol)) assert qsol == FALSE, (ij2, sol, Cfunc.__name__, C.as_list()) for sol in Cneg.itersolve([]): qsol = Mfunc(*my_SOL(ij, sol)) - assert qsol == FALSE, (ij, sol, 'Prevent(%s)' % Cfunc.__name__, Cneg.as_list()) + assert qsol == FALSE, ( + ij, + sol, + "Prevent(%s)" % Cfunc.__name__, + Cneg.as_list(), + ) def test_NOT(): @@ -167,12 +180,12 @@ def test_ALL(): def test_OR(): - my_TEST(my_OR, Clauses.Or, 2, 2, False) + my_TEST(my_OR, Clauses.Or, 2, 2, False) @pytest.mark.integration # only because this test is slow def test_ANY(): - my_TEST(my_OR, Clauses.Any, 0, 4, True) + my_TEST(my_OR, Clauses.Any, 0, 4, True) def test_XOR(): @@ -206,23 +219,88 @@ def test_LinearBound(): L = [ ([], [0, 1], 10), ([], [1, 2], 10), - ({'x1': 2, 'x2': 
2}, [3, 3], 10), - ({'x1': 2, 'x2': 2}, [0, 1], 1000), - ({'x1': 1, 'x2': 2}, [0, 2], 1000), - ({'x1': 2, '!x2': 2}, [0, 2], 1000), + ({"x1": 2, "x2": 2}, [3, 3], 10), + ({"x1": 2, "x2": 2}, [0, 1], 1000), + ({"x1": 1, "x2": 2}, [0, 2], 1000), + ({"x1": 2, "!x2": 2}, [0, 2], 1000), ([(1, 1), (2, 2), (3, 3)], [3, 3], 1000), ([(0, 1), (1, 2), (2, 3), (0, 4), (1, 5), (0, 6), (1, 7)], [0, 2], 1000), - ([(0, 1), (1, 2), (2, 3), (0, 4), (1, 5), (0, 6), (1, 7), - (3, FALSE), (2, TRUE)], [2, 4], 1000), - ([(1, 15), (2, 16), (3, 17), (4, 18), (5, 6), (5, 19), (6, 7), - (6, 20), (7, 8), (7, 21), (7, 28), (8, 9), (8, 22), (8, 29), (8, 41), (9, 10), - (9, 23), (9, 30), (9, 42), (10, 1), (10, 11), (10, 24), (10, 31), - (10, 34), (10, 37), (10, 43), (10, 46), (10, 50), (11, 2), (11, 12), (11, 25), - (11, 32), (11, 35), (11, 38), (11, 44), (11, 47), (11, 51), (12, 3), - (12, 4), (12, 5), (12, 13), (12, 14), (12, 26), (12, 27), (12, 33), (12, 36), - (12, 39), (12, 40), (12, 45), (12, 48), (12, 49), (12, 52), (12, 53), - (12, 54)], [192, 204], 100), - ] + ( + [ + (0, 1), + (1, 2), + (2, 3), + (0, 4), + (1, 5), + (0, 6), + (1, 7), + (3, FALSE), + (2, TRUE), + ], + [2, 4], + 1000, + ), + ( + [ + (1, 15), + (2, 16), + (3, 17), + (4, 18), + (5, 6), + (5, 19), + (6, 7), + (6, 20), + (7, 8), + (7, 21), + (7, 28), + (8, 9), + (8, 22), + (8, 29), + (8, 41), + (9, 10), + (9, 23), + (9, 30), + (9, 42), + (10, 1), + (10, 11), + (10, 24), + (10, 31), + (10, 34), + (10, 37), + (10, 43), + (10, 46), + (10, 50), + (11, 2), + (11, 12), + (11, 25), + (11, 32), + (11, 35), + (11, 38), + (11, 44), + (11, 47), + (11, 51), + (12, 3), + (12, 4), + (12, 5), + (12, 13), + (12, 14), + (12, 26), + (12, 27), + (12, 33), + (12, 36), + (12, 39), + (12, 40), + (12, 45), + (12, 48), + (12, 49), + (12, 52), + (12, 53), + (12, 54), + ], + [192, 204], + 100, + ), + ] for eq, rhs, max_iter in L: if isinstance(eq, dict): N = len(eq) @@ -232,8 +310,8 @@ def test_LinearBound(): Cpos = Clauses(N) Cneg = Clauses(N) if isinstance(eq, dict): - for k in range(1, N+1): - nm = 'x%d' % k + for k in range(1, N + 1): + nm = "x%d" % k C.name_var(k, nm) Cpos.name_var(k, nm) Cneg.name_var(k, nm) @@ -244,29 +322,33 @@ def test_LinearBound(): Cpos.Require(Cpos.LinearBound, eq, rhs[0], rhs[1]) Cneg.Prevent(Cneg.LinearBound, eq, rhs[0], rhs[1]) if x != FALSE: - for _, sol in zip(range(max_iter), C.itersolve([] if x == TRUE else [(x,)], N)): + for _, sol in zip( + range(max_iter), C.itersolve([] if x == TRUE else [(x,)], N) + ): assert rhs[0] <= my_EVAL(eq2, sol) <= rhs[1], C.as_list() if x != TRUE: - for _, sol in zip(range(max_iter), C.itersolve([] if x == TRUE else [(C.Not(x),)], N)): - assert not(rhs[0] <= my_EVAL(eq2, sol) <= rhs[1]), C.as_list() + for _, sol in zip( + range(max_iter), C.itersolve([] if x == TRUE else [(C.Not(x),)], N) + ): + assert not (rhs[0] <= my_EVAL(eq2, sol) <= rhs[1]), C.as_list() for _, sol in zip(range(max_iter), Cpos.itersolve([], N)): - assert rhs[0] <= my_EVAL(eq2, sol) <= rhs[1], ('Cpos', Cpos.as_list()) + assert rhs[0] <= my_EVAL(eq2, sol) <= rhs[1], ("Cpos", Cpos.as_list()) for _, sol in zip(range(max_iter), Cneg.itersolve([], N)): - assert not(rhs[0] <= my_EVAL(eq2, sol) <= rhs[1]), ('Cneg', Cneg.as_list()) + assert not (rhs[0] <= my_EVAL(eq2, sol) <= rhs[1]), ("Cneg", Cneg.as_list()) def test_sat(): C = Clauses() - C.new_var('x1') - C.new_var('x2') + C.new_var("x1") + C.new_var("x2") assert C.sat() is not None assert C.sat([]) is not None assert C.sat([()]) is None assert C.sat([(FALSE,)]) is None assert C.sat([(TRUE,), 
()]) is None assert C.sat([(TRUE, FALSE, -1)]) is not None - assert C.sat([(+1, FALSE), (+2,), (TRUE,)], names=True) == {'x1', 'x2'} - assert C.sat([(-1, FALSE), (TRUE,), (+2,)], names=True) == {'x2'} + assert C.sat([(+1, FALSE), (+2,), (TRUE,)], names=True) == {"x1", "x2"} + assert C.sat([(-1, FALSE), (TRUE,), (+2,)], names=True) == {"x2"} assert C.sat([(TRUE,), (-1,), (-2, FALSE)], names=True) == set() assert C.sat([(+1,), (-1, FALSE)], names=True) is None C._clauses.unsat = True @@ -298,14 +380,30 @@ def test_minimize(): assert sval == 11 -@pytest.mark.xfail(reason="Broke this with reworking minimal_unsatisfiable_set. Not sure how to fix. minimal_unsatisfiable_subset function is otherwise working well.") +@pytest.mark.xfail( + reason="Broke this with reworking minimal_unsatisfiable_set. Not sure how to fix. minimal_unsatisfiable_subset function is otherwise working well." +) def test_minimal_unsatisfiable_subset(): def sat(val): return Clauses(max(abs(v) for v in chain(*val))).sat(val) + assert raises(ValueError, lambda: minimal_unsatisfiable_subset([[1]], sat)) - clauses = [[-10], [1], [5], [2, 3], [3, 4], [5, 2], [-7], [2], [3], - [-2, -3, 5], [7, 8, 9, 10], [-8], [-9]] + clauses = [ + [-10], + [1], + [5], + [2, 3], + [3, 4], + [5, 2], + [-7], + [2], + [3], + [-2, -3, 5], + [7, 8, 9, 10], + [-8], + [-9], + ] res = minimal_unsatisfiable_subset(clauses, sat) assert sorted(res) == [[-10], [-9], [-8], [-7], [7, 8, 9, 10]] assert not sat(res) diff --git a/tests/test_misc.py b/tests/test_misc.py index 8b1d0368505..3acb3b6e503 100644 --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -18,7 +18,9 @@ def test_Utf8NamedTemporaryFile(): try: with Utf8NamedTemporaryFile(delete=False) as tf: tf.write( - test_string.encode("utf-8") if hasattr(test_string, "encode") else test_string + test_string.encode("utf-8") + if hasattr(test_string, "encode") + else test_string ) fname = tf.name with codecs.open(fname, mode="rb", encoding="utf-8") as fh: @@ -85,7 +87,6 @@ def test_explicit_missing_cache_entries(mocker): from conda.core.package_cache_data import PackageCacheData with make_temp_env() as prefix: # ensure writable env - if len(PackageCacheData.get_all_extracted_entries()) == 0: # Package cache e.g. 
./devenv/Darwin/x86_64/envs/devenv-3.9-c/pkgs/ can # be empty in certain cases (Noted in OSX with Python 3.9, when @@ -136,7 +137,12 @@ def test_walk_prefix(tmpdir): # tmpdir is a py.test utility # walk_prefix has windows_forward_slashes on by default, so we don't need # any special-casing there - answer = {"testfile1", "bin/testfile", "testdir1/testfile", "testdir1/testdir2/testfile"} + answer = { + "testfile1", + "bin/testfile", + "testdir1/testfile", + "testdir1/testdir2/testfile", + } if sys.platform != "darwin": answer.add("python.app") diff --git a/tests/test_plan.py b/tests/test_plan.py index 6567854a327..fd4bda32a91 100644 --- a/tests/test_plan.py +++ b/tests/test_plan.py @@ -1,35 +1,38 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import random +import unittest from collections import defaultdict, namedtuple from contextlib import contextmanager from functools import partial from os.path import join -import random -import unittest from unittest import mock import pytest +import conda.instructions as inst from conda import CondaError -from conda.base.context import context, stack_context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context, stack_context from conda.cli.python_api import Commands, run_command from conda.common.io import env_var from conda.core.solve import get_pinned_specs from conda.exceptions import PackagesNotFoundError +from conda.exports import execute_plan from conda.gateways.disk.create import mkdir_p -import conda.instructions as inst from conda.models.channel import Channel from conda.models.dist import Dist -from conda.models.records import PackageRecord from conda.models.match_spec import MatchSpec -from conda.plan import display_actions, add_unlink, add_defaults_to_specs, _update_old_plan as update_old_plan -from conda.exports import execute_plan +from conda.models.records import PackageRecord +from conda.plan import _update_old_plan as update_old_plan +from conda.plan import add_defaults_to_specs, add_unlink, display_actions from conda.testing.helpers import captured, get_index_r_1 from .gateways.disk.test_permissions import tempdir -index, r, = get_index_r_1() +( + index, + r, +) = get_index_r_1() index = index.copy() # create a shallow copy so this module can mutate state @@ -45,7 +48,7 @@ def DPkg(s, **kwargs): name=d.name, version=d.version, build=d.build_string, - build_number=int(d.build_string.rsplit('_', 1)[-1]), + build_number=int(d.build_string.rsplit("_", 1)[-1]), channel=d.channel, subdir=context.subdir, md5="012345789", @@ -53,6 +56,7 @@ def DPkg(s, **kwargs): _kwargs.update(kwargs) return PackageRecord(**_kwargs) + def solve(specs): return [Dist.from_string(fn) for fn in r.solve(specs)] @@ -64,6 +68,7 @@ def generate_random_dist(self): @contextmanager def mock_platform(self, windows=False): from conda import plan + with mock.patch.object(plan, "sys") as sys: sys.platform = "win32" if windows else "not win32" yield sys @@ -74,7 +79,12 @@ def test_simply_adds_unlink_on_non_windows(self): with self.mock_platform(windows=False): add_unlink(actions, dist) self.assertIn(inst.UNLINK, actions) - self.assertEqual(actions[inst.UNLINK], [dist, ]) + self.assertEqual( + actions[inst.UNLINK], + [ + dist, + ], + ) def test_adds_to_existing_actions(self): actions = {inst.UNLINK: [{"foo": "bar"}]} @@ -90,22 +100,30 @@ class TestAddDeaultsToSpec(unittest.TestCase): def check(self, specs, added): new_specs = list(specs + added) add_defaults_to_specs(r, self.linked, specs) - 
specs = [s.split(' (')[0] for s in specs] + specs = [s.split(" (")[0] for s in specs] self.assertEqual(specs, new_specs) def test_display_actions_0(): - with env_var('CONDA_SHOW_CHANNEL_URLS', 'False', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_SHOW_CHANNEL_URLS", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): actions = defaultdict(list) - actions.update({"FETCH": [ - get_matchspec_from_index(index, "channel-1::sympy==0.7.2=py27_0"), - get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py27_0"), - ]}) + actions.update( + { + "FETCH": [ + get_matchspec_from_index(index, "channel-1::sympy==0.7.2=py27_0"), + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py27_0"), + ] + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -119,24 +137,28 @@ def test_display_actions_0(): Total: 9.9 MB """ + ) actions = defaultdict(list) - actions.update({ - 'PREFIX': '/Users/aaronmeurer/anaconda/envs/test', - 'SYMLINK_CONDA': ['/Users/aaronmeurer/anaconda'], - 'LINK': [ - get_matchspec_from_index(index, "channel-1::python==3.3.2=0"), - get_matchspec_from_index(index, "channel-1::readline==6.2=0"), - get_matchspec_from_index(index, "channel-1::sqlite==3.7.13=0"), - get_matchspec_from_index(index, "channel-1::tk==8.5.13=0"), - get_matchspec_from_index(index, "channel-1::zlib==1.2.7=0"), - ] - }) + actions.update( + { + "PREFIX": "/Users/aaronmeurer/anaconda/envs/test", + "SYMLINK_CONDA": ["/Users/aaronmeurer/anaconda"], + "LINK": [ + get_matchspec_from_index(index, "channel-1::python==3.3.2=0"), + get_matchspec_from_index(index, "channel-1::readline==6.2=0"), + get_matchspec_from_index(index, "channel-1::sqlite==3.7.13=0"), + get_matchspec_from_index(index, "channel-1::tk==8.5.13=0"), + get_matchspec_from_index(index, "channel-1::zlib==1.2.7=0"), + ], + } + ) with captured() as c: display_actions(actions, index) - - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## environment location: /Users/aaronmeurer/anaconda/envs/test @@ -151,14 +173,17 @@ def test_display_actions_0(): zlib: 1.2.7-0 \n\ """ + ) - actions['UNLINK'] = actions['LINK'] - actions['LINK'] = [] + actions["UNLINK"] = actions["LINK"] + actions["LINK"] = [] with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## environment location: /Users/aaronmeurer/anaconda/envs/test @@ -173,21 +198,26 @@ def test_display_actions_0(): zlib: 1.2.7-0 \n\ """ + ) actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0"), - ], - 'UNLINK': [ - get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), - ], - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -196,13 +226,16 @@ def test_display_actions_0(): cython: 0.19-py33_0 --> 0.19.1-py33_0 """ + ) - actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"] with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -211,24 +244,30 @@ def 
test_display_actions_0(): cython: 0.19.1-py33_0 --> 0.19-py33_0 """ + ) actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19.1=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==1.5=py33_0'), - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_0'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==2.1=py33_1'), - get_matchspec_from_index(index, 'channel-1::pip==1.3.1=py33_1'), - ]}) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==1.5=py33_0"), + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==2.1=py33_1"), + get_matchspec_from_index(index, "channel-1::pip==1.3.1=py33_1"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -249,23 +288,28 @@ def test_display_actions_0(): dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ """ + ) actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19.1=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==2.1=py33_1'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==1.5=py33_0'), - ], - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==2.1=py33_1"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==1.5=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -275,13 +319,16 @@ def test_display_actions_0(): dateutil: 1.5-py33_0 --> 2.1-py33_1 \n\ """ + ) - actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"] with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -291,25 +338,36 @@ def test_display_actions_0(): dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ """ + ) def test_display_actions_show_channel_urls(): - with env_var('CONDA_SHOW_CHANNEL_URLS', 'True', stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_SHOW_CHANNEL_URLS", "True", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): actions = defaultdict(list) - sympy_prec = PackageRecord.from_objects(get_matchspec_from_index(index, 'channel-1::sympy==0.7.2=py27_0')) - numpy_prec = PackageRecord.from_objects(get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py27_0")) + sympy_prec = PackageRecord.from_objects( + get_matchspec_from_index(index, "channel-1::sympy==0.7.2=py27_0") + ) + numpy_prec = PackageRecord.from_objects( + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py27_0") + ) numpy_prec.channel = sympy_prec.channel = Channel(None) - actions.update({ - "FETCH": [ - sympy_prec, - numpy_prec, - ] - }) + actions.update( + { + "FETCH": [ + sympy_prec, + numpy_prec, + ] + } + ) with captured() as c: 
display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -323,26 +381,31 @@ def test_display_actions_show_channel_urls(): Total: 9.9 MB """ + ) actions = defaultdict(list) - actions.update({ - 'PREFIX': '/Users/aaronmeurer/anaconda/envs/test', - 'SYMLINK_CONDA': [ - '/Users/aaronmeurer/anaconda', - ], - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::python==3.3.2=0'), - get_matchspec_from_index(index, 'channel-1::readline==6.2=0'), - get_matchspec_from_index(index, 'channel-1::sqlite==3.7.13=0'), - get_matchspec_from_index(index, 'channel-1::tk==8.5.13=0'), - get_matchspec_from_index(index, 'channel-1::zlib==1.2.7=0'), - ] - }) + actions.update( + { + "PREFIX": "/Users/aaronmeurer/anaconda/envs/test", + "SYMLINK_CONDA": [ + "/Users/aaronmeurer/anaconda", + ], + "LINK": [ + get_matchspec_from_index(index, "channel-1::python==3.3.2=0"), + get_matchspec_from_index(index, "channel-1::readline==6.2=0"), + get_matchspec_from_index(index, "channel-1::sqlite==3.7.13=0"), + get_matchspec_from_index(index, "channel-1::tk==8.5.13=0"), + get_matchspec_from_index(index, "channel-1::zlib==1.2.7=0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## environment location: /Users/aaronmeurer/anaconda/envs/test @@ -357,14 +420,17 @@ def test_display_actions_show_channel_urls(): zlib: 1.2.7-0 channel-1 """ + ) - actions['UNLINK'] = actions['LINK'] - actions['LINK'] = [] + actions["UNLINK"] = actions["LINK"] + actions["LINK"] = [] with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## environment location: /Users/aaronmeurer/anaconda/envs/test @@ -379,21 +445,26 @@ def test_display_actions_show_channel_urls(): zlib: 1.2.7-0 channel-1 """ + ) actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19.1=py33_0'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -402,13 +473,16 @@ def test_display_actions_show_channel_urls(): cython: 0.19-py33_0 channel-1 --> 0.19.1-py33_0 channel-1 """ + ) - actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"] with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -417,25 +491,30 @@ def test_display_actions_show_channel_urls(): cython: 0.19.1-py33_0 channel-1 --> 0.19-py33_0 channel-1 """ + ) actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19.1=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==1.5=py33_0'), - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_0'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==2.1=py33_1'), - get_matchspec_from_index(index, 'channel-1::pip==1.3.1=py33_1'), - ] - }) + actions.update( + { + "LINK": [ + 
get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==1.5=py33_0"), + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==2.1=py33_1"), + get_matchspec_from_index(index, "channel-1::pip==1.3.1=py33_1"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -456,23 +535,28 @@ def test_display_actions_show_channel_urls(): dateutil: 2.1-py33_1 channel-1 --> 1.5-py33_0 channel-1 """ + ) actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19.1=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==2.1=py33_1'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - get_matchspec_from_index(index, 'channel-1::dateutil==1.5=py33_0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==2.1=py33_1"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + get_matchspec_from_index(index, "channel-1::dateutil==1.5=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -482,13 +566,16 @@ def test_display_actions_show_channel_urls(): dateutil: 1.5-py33_0 channel-1 --> 2.1-py33_1 channel-1 """ + ) - actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"] with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -498,28 +585,36 @@ def test_display_actions_show_channel_urls(): dateutil: 2.1-py33_1 channel-1 --> 1.5-py33_0 channel-1 """ + ) - cython_prec = PackageRecord.from_objects(get_matchspec_from_index(index, 'channel-1::cython==0.19.1=py33_0')) - dateutil_prec = PackageRecord.from_objects(get_matchspec_from_index(index, 'channel-1::dateutil==1.5=py33_0')) + cython_prec = PackageRecord.from_objects( + get_matchspec_from_index(index, "channel-1::cython==0.19.1=py33_0") + ) + dateutil_prec = PackageRecord.from_objects( + get_matchspec_from_index(index, "channel-1::dateutil==1.5=py33_0") + ) cython_prec.channel = dateutil_prec.channel = Channel("my_channel") actions = defaultdict(list) - actions.update({ - 'LINK': [ - cython_prec, - get_matchspec_from_index(index, 'channel-1::dateutil==2.1=py33_1'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - dateutil_prec, - ] - }) - + actions.update( + { + "LINK": [ + cython_prec, + get_matchspec_from_index(index, "channel-1::dateutil==2.1=py33_1"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + dateutil_prec, + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -529,13 +624,16 @@ def test_display_actions_show_channel_urls(): dateutil: 1.5-py33_0 my_channel --> 2.1-py33_1 channel-1 \n\ """ + ) - actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + actions["LINK"], actions["UNLINK"] = actions["UNLINK"], actions["LINK"] with captured() as c: 
display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -545,20 +643,40 @@ def test_display_actions_show_channel_urls(): dateutil: 2.1-py33_1 channel-1 --> 1.5-py33_0 my_channel """ + ) -@pytest.mark.xfail(strict=True, reason="Not reporting link type until refactoring display_actions " - "after txn.verify()") +@pytest.mark.xfail( + strict=True, + reason="Not reporting link type until refactoring display_actions " + "after txn.verify()", +) def test_display_actions_link_type(): - with env_var('CONDA_SHOW_CHANNEL_URLS', 'False', stack_callback=conda_tests_ctxt_mgmt_def_pol): - - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 2', 'dateutil-1.5-py33_0 2', - 'numpy-1.7.1-py33_0 2', 'python-3.3.2-0 2', 'readline-6.2-0 2', 'sqlite-3.7.13-0 2', 'tk-8.5.13-0 2', 'zlib-1.2.7-0 2']}) + with env_var( + "CONDA_SHOW_CHANNEL_URLS", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + actions = defaultdict( + list, + { + "LINK": [ + "cython-0.19.1-py33_0 2", + "dateutil-1.5-py33_0 2", + "numpy-1.7.1-py33_0 2", + "python-3.3.2-0 2", + "readline-6.2-0 2", + "sqlite-3.7.13-0 2", + "tk-8.5.13-0 2", + "zlib-1.2.7-0 2", + ] + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following NEW packages will be INSTALLED: cython: 0.19.1-py33_0 (softlink) @@ -571,44 +689,74 @@ def test_display_actions_link_type(): zlib: 1.2.7-0 (softlink) """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 2', - 'dateutil-2.1-py33_1 2'], 'UNLINK': ['cython-0.19-py33_0', - 'dateutil-1.5-py33_0']}) + actions = defaultdict( + list, + { + "LINK": ["cython-0.19.1-py33_0 2", "dateutil-2.1-py33_1 2"], + "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be UPDATED: cython: 0.19-py33_0 --> 0.19.1-py33_0 (softlink) dateutil: 1.5-py33_0 --> 2.1-py33_1 (softlink) """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 2', - 'dateutil-1.5-py33_0 2'], 'UNLINK': ['cython-0.19.1-py33_0', - 'dateutil-2.1-py33_1']}) + actions = defaultdict( + list, + { + "LINK": ["cython-0.19-py33_0 2", "dateutil-1.5-py33_0 2"], + "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be DOWNGRADED: cython: 0.19.1-py33_0 --> 0.19-py33_0 (softlink) dateutil: 2.1-py33_1 --> 1.5-py33_0 (softlink) """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 1', 'dateutil-1.5-py33_0 1', - 'numpy-1.7.1-py33_0 1', 'python-3.3.2-0 1', 'readline-6.2-0 1', 'sqlite-3.7.13-0 1', 'tk-8.5.13-0 1', 'zlib-1.2.7-0 1']}) + actions = defaultdict( + list, + { + "LINK": [ + "cython-0.19.1-py33_0 1", + "dateutil-1.5-py33_0 1", + "numpy-1.7.1-py33_0 1", + "python-3.3.2-0 1", + "readline-6.2-0 1", + "sqlite-3.7.13-0 1", + "tk-8.5.13-0 1", + "zlib-1.2.7-0 1", + ] + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following NEW packages will be INSTALLED: cython: 0.19.1-py33_0 @@ -621,44 +769,74 @@ def test_display_actions_link_type(): zlib: 1.2.7-0 \n\ """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 1', - 'dateutil-2.1-py33_1 1'], 'UNLINK': ['cython-0.19-py33_0', - 'dateutil-1.5-py33_0']}) + actions = defaultdict( + 
list, + { + "LINK": ["cython-0.19.1-py33_0 1", "dateutil-2.1-py33_1 1"], + "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be UPDATED: cython: 0.19-py33_0 --> 0.19.1-py33_0 dateutil: 1.5-py33_0 --> 2.1-py33_1 \n\ """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 1', - 'dateutil-1.5-py33_0 1'], 'UNLINK': ['cython-0.19.1-py33_0', - 'dateutil-2.1-py33_1']}) + actions = defaultdict( + list, + { + "LINK": ["cython-0.19-py33_0 1", "dateutil-1.5-py33_0 1"], + "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be DOWNGRADED: cython: 0.19.1-py33_0 --> 0.19-py33_0 dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3', 'dateutil-1.5-py33_0 3', - 'numpy-1.7.1-py33_0 3', 'python-3.3.2-0 3', 'readline-6.2-0 3', 'sqlite-3.7.13-0 3', 'tk-8.5.13-0 3', 'zlib-1.2.7-0 3']}) + actions = defaultdict( + list, + { + "LINK": [ + "cython-0.19.1-py33_0 3", + "dateutil-1.5-py33_0 3", + "numpy-1.7.1-py33_0 3", + "python-3.3.2-0 3", + "readline-6.2-0 3", + "sqlite-3.7.13-0 3", + "tk-8.5.13-0 3", + "zlib-1.2.7-0 3", + ] + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following NEW packages will be INSTALLED: cython: 0.19.1-py33_0 (copy) @@ -671,51 +849,82 @@ def test_display_actions_link_type(): zlib: 1.2.7-0 (copy) """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3', - 'dateutil-2.1-py33_1 3'], 'UNLINK': ['cython-0.19-py33_0', - 'dateutil-1.5-py33_0']}) + actions = defaultdict( + list, + { + "LINK": ["cython-0.19.1-py33_0 3", "dateutil-2.1-py33_1 3"], + "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be UPDATED: cython: 0.19-py33_0 --> 0.19.1-py33_0 (copy) dateutil: 1.5-py33_0 --> 2.1-py33_1 (copy) """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 3', - 'dateutil-1.5-py33_0 3'], 'UNLINK': ['cython-0.19.1-py33_0', - 'dateutil-2.1-py33_1']}) + actions = defaultdict( + list, + { + "LINK": ["cython-0.19-py33_0 3", "dateutil-1.5-py33_0 3"], + "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be DOWNGRADED: cython: 0.19.1-py33_0 --> 0.19-py33_0 (copy) dateutil: 2.1-py33_1 --> 1.5-py33_0 (copy) """ - with env_var('CONDA_SHOW_CHANNEL_URLS', 'True', stack_callback=conda_tests_ctxt_mgmt_def_pol): - - d = Dist('cython-0.19.1-py33_0.tar.bz2') - index[d] = PackageRecord.from_objects(index[d], channel='my_channel') - - d = Dist('dateutil-1.5-py33_0.tar.bz2') - index[d] = PackageRecord.from_objects(index[d], channel='my_channel') - - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3', 'dateutil-1.5-py33_0 3', - 'numpy-1.7.1-py33_0 3', 'python-3.3.2-0 3', 'readline-6.2-0 3', 'sqlite-3.7.13-0 3', 'tk-8.5.13-0 3', 'zlib-1.2.7-0 3']}) + ) + with env_var( + "CONDA_SHOW_CHANNEL_URLS", "True", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): + d = Dist("cython-0.19.1-py33_0.tar.bz2") + index[d] = 
PackageRecord.from_objects(index[d], channel="my_channel") + + d = Dist("dateutil-1.5-py33_0.tar.bz2") + index[d] = PackageRecord.from_objects(index[d], channel="my_channel") + + actions = defaultdict( + list, + { + "LINK": [ + "cython-0.19.1-py33_0 3", + "dateutil-1.5-py33_0 3", + "numpy-1.7.1-py33_0 3", + "python-3.3.2-0 3", + "readline-6.2-0 3", + "sqlite-3.7.13-0 3", + "tk-8.5.13-0 3", + "zlib-1.2.7-0 3", + ] + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following NEW packages will be INSTALLED: cython: 0.19.1-py33_0 my_channel (copy) @@ -728,53 +937,73 @@ def test_display_actions_link_type(): zlib: 1.2.7-0 (copy) """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 3', - 'dateutil-2.1-py33_1 3'], 'UNLINK': ['cython-0.19-py33_0', - 'dateutil-1.5-py33_0']}) + actions = defaultdict( + list, + { + "LINK": ["cython-0.19.1-py33_0 3", "dateutil-2.1-py33_1 3"], + "UNLINK": ["cython-0.19-py33_0", "dateutil-1.5-py33_0"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be UPDATED: cython: 0.19-py33_0 --> 0.19.1-py33_0 my_channel (copy) dateutil: 1.5-py33_0 my_channel --> 2.1-py33_1 (copy) """ + ) - actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 3', - 'dateutil-1.5-py33_0 3'], 'UNLINK': ['cython-0.19.1-py33_0', - 'dateutil-2.1-py33_1']}) + actions = defaultdict( + list, + { + "LINK": ["cython-0.19-py33_0 3", "dateutil-1.5-py33_0 3"], + "UNLINK": ["cython-0.19.1-py33_0", "dateutil-2.1-py33_1"], + }, + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ The following packages will be DOWNGRADED: cython: 0.19.1-py33_0 my_channel --> 0.19-py33_0 (copy) dateutil: 2.1-py33_1 --> 1.5-py33_0 my_channel (copy) """ + ) def test_display_actions_features(): - with env_var('CONDA_SHOW_CHANNEL_URLS', 'False', stack_callback=conda_tests_ctxt_mgmt_def_pol): - + with env_var( + "CONDA_SHOW_CHANNEL_URLS", "False", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + ] + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -784,19 +1013,24 @@ def test_display_actions_features(): numpy: 1.7.1-py33_p0 [mkl] """ + ) actions = defaultdict(list) - actions.update({ - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - ] - }) + actions.update( + { + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + ] + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """ ## Package Plan ## @@ -806,21 +1040,26 @@ def test_display_actions_features(): numpy: 1.7.1-py33_p0 [mkl] """ + ) actions = defaultdict(list) - actions.update({ - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'LINK': [ - get_matchspec_from_index(index, 
'channel-1::numpy==1.7.0=py33_p0'), - ] - }) + actions.update( + { + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.0=py33_p0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -829,21 +1068,26 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_p0 [mkl] --> 1.7.0-py33_p0 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.0=py33_p0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.0=py33_p0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -852,22 +1096,27 @@ def test_display_actions_features():

 numpy: 1.7.0-py33_p0 [mkl] --> 1.7.1-py33_p0 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) # NB: Packages whose version does not change are put in UPDATED - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -876,21 +1125,26 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_0 --> 1.7.1-py33_p0 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_0'), - ] - }) + actions.update( + { + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -899,20 +1153,26 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_p0 [mkl] --> 1.7.1-py33_0

""" - with env_var('CONDA_SHOW_CHANNEL_URLS', 'True', stack_callback=conda_tests_ctxt_mgmt_def_pol): - + ) + with env_var( + "CONDA_SHOW_CHANNEL_URLS", "True", stack_callback=conda_tests_ctxt_mgmt_def_pol + ): actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + ] + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -922,19 +1182,24 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_p0 channel-1 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - get_matchspec_from_index(index, 'channel-1::cython==0.19=py33_0'), - ] - }) + actions.update( + { + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + get_matchspec_from_index(index, "channel-1::cython==0.19=py33_0"), + ] + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -944,21 +1209,26 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_p0 channel-1 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.0=py33_p0'), - ] - }) + actions.update( + { + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.0=py33_p0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -967,21 +1237,26 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_p0 channel-1 [mkl] --> 1.7.0-py33_p0 channel-1 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.0=py33_p0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.0=py33_p0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -990,22 +1265,27 @@ def test_display_actions_features():

 numpy: 1.7.0-py33_p0 channel-1 [mkl] --> 1.7.1-py33_p0 channel-1 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_0'), - ] - }) + actions.update( + { + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) # NB: Packages whose version does not change are put in UPDATED - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -1014,21 +1294,26 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_0 channel-1 --> 1.7.1-py33_p0 channel-1 [mkl]

""" + )

 actions = defaultdict(list) - actions.update({ - 'UNLINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_p0'), - ], - 'LINK': [ - get_matchspec_from_index(index, 'channel-1::numpy==1.7.1=py33_0'), - ] - }) + actions.update( + { + "UNLINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_p0"), + ], + "LINK": [ + get_matchspec_from_index(index, "channel-1::numpy==1.7.1=py33_0"), + ], + } + ) with captured() as c: display_actions(actions, index) - assert c.stdout == """ + assert ( + c.stdout + == """

## Package Plan ##

@@ -1037,53 +1322,68 @@ def test_display_actions_features():

 numpy: 1.7.1-py33_p0 channel-1 [mkl] --> 1.7.1-py33_0 channel-1

""" + )

class TestDeprecatedExecutePlan(unittest.TestCase): - def test_update_old_plan(self): - old_plan = ['# plan', 'INSTRUCTION arg'] + old_plan = ["# plan", "INSTRUCTION arg"] new_plan = 
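
# [editor's note] test_update_old_plan (continuing below) documents conda's
# legacy text-plan format: "#"-prefixed comment lines plus "INSTRUCTION arg"
# lines that update_old_plan() turns into (instruction, arg) tuples. A rough,
# illustrative re-implementation of that parse (not conda's actual code):
def parse_old_plan(lines):
    plan = []
    for line in lines:
        if line.startswith("#"):
            continue  # comment lines such as "# plan" carry no instruction
        instruction, _, arg = line.partition(" ")
        plan.append((instruction, arg))
    return plan

assert parse_old_plan(["# plan", "INSTRUCTION arg"]) == [("INSTRUCTION", "arg")]
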
update_old_plan(old_plan) - expected = [('INSTRUCTION', 'arg')] + expected = [("INSTRUCTION", "arg")] self.assertEqual(new_plan, expected) with self.assertRaises(CondaError): - update_old_plan(['INVALID']) + update_old_plan(["INVALID"]) def test_execute_plan(self): initial_commands = inst.commands def set_commands(cmds): inst.commands = cmds - self.addCleanup(lambda : set_commands(initial_commands)) + + self.addCleanup(lambda: set_commands(initial_commands)) def INSTRUCTION_CMD(state, arg): INSTRUCTION_CMD.called = True INSTRUCTION_CMD.arg = arg - set_commands({'INSTRUCTION': INSTRUCTION_CMD}) + set_commands({"INSTRUCTION": INSTRUCTION_CMD}) - old_plan = ['# plan', 'INSTRUCTION arg'] + old_plan = ["# plan", "INSTRUCTION arg"] execute_plan(old_plan) self.assertTrue(INSTRUCTION_CMD.called) - self.assertEqual(INSTRUCTION_CMD.arg, 'arg') + self.assertEqual(INSTRUCTION_CMD.arg, "arg") def generate_mocked_resolve(pkgs, install=None): - mock_package = namedtuple("IndexRecord", - ["preferred_env", "name", "schannel", "version", "fn"]) - mock_resolve = namedtuple("Resolve", ["get_dists_for_spec", "index", "explicit", "install", - "package_name", "dependency_sort"]) + mock_package = namedtuple( + "IndexRecord", ["preferred_env", "name", "schannel", "version", "fn"] + ) + mock_resolve = namedtuple( + "Resolve", + [ + "get_dists_for_spec", + "index", + "explicit", + "install", + "package_name", + "dependency_sort", + ], + ) index = {} groups = defaultdict(list) for preferred_env, name, schannel, version in pkgs: dist = Dist.from_string(f"{name}-{version}-0", channel_override=schannel) pkg = mock_package( - preferred_env=preferred_env, name=name, schannel=schannel, version=version, fn=name + preferred_env=preferred_env, + name=name, + schannel=schannel, + version=version, + fn=name, ) groups[name].append(dist) index[dist] = pkg @@ -1107,9 +1407,14 @@ def get_package_name(dist): def get_dependency_sort(specs): return tuple(spec for spec in specs.values()) - return mock_resolve(get_dists_for_spec=get_dists_for_spec, index=index, explicit=get_explicit, - install=get_install, package_name=get_package_name, - dependency_sort=get_dependency_sort) + return mock_resolve( + get_dists_for_spec=get_dists_for_spec, + index=index, + explicit=get_explicit, + install=get_install, + package_name=get_package_name, + dependency_sort=get_dependency_sort, + ) def generate_mocked_record(dist_name): @@ -1118,8 +1423,15 @@ def generate_mocked_record(dist_name): def generate_mocked_context(prefix, root_prefix, envs_dirs): - mocked_context = namedtuple("Context", ["prefix", "root_prefix", "envs_dirs", "prefix_specified"]) - return mocked_context(prefix=prefix, root_prefix=root_prefix, envs_dirs=envs_dirs, prefix_specified=False) + mocked_context = namedtuple( + "Context", ["prefix", "root_prefix", "envs_dirs", "prefix_specified"] + ) + return mocked_context( + prefix=prefix, + root_prefix=root_prefix, + envs_dirs=envs_dirs, + prefix_specified=False, + ) class TestGetActionsForDist(unittest.TestCase): @@ -1128,25 +1440,38 @@ def setUp(self): (None, "test-spec", "defaults", "1"), ("ranenv", "test-spec", "defaults", "5"), (None, "test-spec2", "defaults", "1"), - ("ranenv", "test", "defaults", "1.2.0")] + ("ranenv", "test", "defaults", "1.2.0"), + ] self.res = generate_mocked_resolve(self.pkgs) def generate_remove_action(prefix, unlink): action = defaultdict(list) - action["op_order"] = ('CHECK_FETCH', 'RM_FETCHED', 'FETCH', 'CHECK_EXTRACT', 'RM_EXTRACTED', - 'EXTRACT', 'UNLINK', 'LINK', 'SYMLINK_CONDA') + action["op_order"] = ( + 
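
# [editor's note] generate_mocked_resolve and generate_mocked_context above
# fake conda internals with plain namedtuples, which works because the tests
# only ever read a fixed set of attributes. The same pattern in miniature
# (names here are hypothetical, not conda APIs):
from collections import namedtuple

MockContext = namedtuple("MockContext", ["prefix", "root_prefix"])
ctx = MockContext(prefix="/tmp/env", root_prefix="/opt/conda")
assert ctx.prefix == "/tmp/env"  # attribute access works; no real Context needed
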
"CHECK_FETCH", + "RM_FETCHED", + "FETCH", + "CHECK_EXTRACT", + "RM_EXTRACTED", + "EXTRACT", + "UNLINK", + "LINK", + "SYMLINK_CONDA", + ) action["PREFIX"] = prefix action["UNLINK"] = unlink return action - def test_pinned_specs(): # Test pinned specs environment variable specs_str_1 = ("numpy 1.11", "python >3") specs_1 = tuple(MatchSpec(spec_str, optional=True) for spec_str in specs_str_1) - with env_var('CONDA_PINNED_PACKAGES', '&'.join(specs_str_1), stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_PINNED_PACKAGES", + "&".join(specs_str_1), + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): pinned_specs = get_pinned_specs("/none") assert pinned_specs == specs_1 assert pinned_specs != specs_str_1 @@ -1156,8 +1481,8 @@ def test_pinned_specs(): specs_2 = tuple(MatchSpec(spec_str, optional=True) for spec_str in specs_str_2) with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - with open(join(td, 'conda-meta', 'pinned'), 'w') as fh: + mkdir_p(join(td, "conda-meta")) + with open(join(td, "conda-meta", "pinned"), "w") as fh: fh.write("\n".join(specs_str_2)) fh.write("\n") pinned_specs = get_pinned_specs(td) @@ -1166,22 +1491,29 @@ def test_pinned_specs(): # Test pinned specs conda configuration and pinned specs conda environment file with tempdir() as td: - mkdir_p(join(td, 'conda-meta')) - pinned_filename = join(td, 'conda-meta', 'pinned') - with open(pinned_filename, 'w') as fh: + mkdir_p(join(td, "conda-meta")) + pinned_filename = join(td, "conda-meta", "pinned") + with open(pinned_filename, "w") as fh: fh.write("\n".join(specs_str_1)) fh.write("\n") - with env_var('CONDA_PREFIX', td, stack_callback=conda_tests_ctxt_mgmt_def_pol): - run_command(Commands.CONFIG, "--env", "--add", "pinned_packages", "requests=2.13") - condarc = join(td, '.condarc') - with env_var('CONDA_PINNED_PACKAGES', '&'.join(specs_str_2), partial(stack_context, True, search_path=(condarc,))):#conda_tests_ctxt_mgmt_def_pol): + with env_var("CONDA_PREFIX", td, stack_callback=conda_tests_ctxt_mgmt_def_pol): + run_command( + Commands.CONFIG, "--env", "--add", "pinned_packages", "requests=2.13" + ) + condarc = join(td, ".condarc") + with env_var( + "CONDA_PINNED_PACKAGES", + "&".join(specs_str_2), + partial(stack_context, True, search_path=(condarc,)), + ): # conda_tests_ctxt_mgmt_def_pol): pinned_specs = get_pinned_specs(td) - expected = specs_2 + (MatchSpec("requests 2.13.*", optional=True),) + specs_1 + expected = ( + specs_2 + (MatchSpec("requests 2.13.*", optional=True),) + specs_1 + ) assert pinned_specs == expected assert pinned_specs != specs_str_1 + ("requests 2.13",) + specs_str_2 - -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tests/test_priority.py b/tests/test_priority.py index c5a52f38d91..7dc1a239a5c 100644 --- a/tests/test_priority.py +++ b/tests/test_priority.py @@ -1,43 +1,59 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - +import re from json import loads as json_loads from unittest import TestCase -import re import pytest -from conda.base.context import context, conda_tests_ctxt_mgmt_def_pol +from conda.base.context import conda_tests_ctxt_mgmt_def_pol, context from conda.common.io import env_var -from conda.testing.integration import Commands, package_is_installed, get_conda_list_tuple, \ - make_temp_env, run_command +from conda.testing.integration import ( + Commands, + get_conda_list_tuple, + make_temp_env, + package_is_installed, + run_command, +) @pytest.mark.integration class 
PriorityIntegrationTests(TestCase): - def test_channel_order_channel_priority_true(self): # This is broken, make_temp_env will reset the context. We get away with it, but really # we need a function that does both these at the same time. - with env_var("CONDA_PINNED_PACKAGES", "python=3.8", stack_callback=conda_tests_ctxt_mgmt_def_pol): + with env_var( + "CONDA_PINNED_PACKAGES", + "python=3.8", + stack_callback=conda_tests_ctxt_mgmt_def_pol, + ): with make_temp_env("pycosat=0.6.3") as prefix: - assert package_is_installed(prefix, 'python=3.8') - assert package_is_installed(prefix, 'pycosat') + assert package_is_installed(prefix, "python=3.8") + assert package_is_installed(prefix, "pycosat") - payload, _, _ = run_command(Commands.CONFIG, prefix, "--get", "channels", "--json") + payload, _, _ = run_command( + Commands.CONFIG, prefix, "--get", "channels", "--json" + ) default_channels = json_loads(payload)["get"].get("channels") if default_channels: run_command(Commands.CONFIG, prefix, "--remove-key", "channels") # add conda-forge channel - o, e, _ = run_command(Commands.CONFIG, prefix, "--prepend", "channels", "conda-forge", '--json') + o, e, _ = run_command( + Commands.CONFIG, + prefix, + "--prepend", + "channels", + "conda-forge", + "--json", + ) assert context.channels == ("conda-forge", "defaults"), o + e # update --all - update_stdout, _, _ = run_command(Commands.UPDATE, prefix, '--all') + update_stdout, _, _ = run_command(Commands.UPDATE, prefix, "--all") # this assertion works with the pinned_packages config to make sure # conda update --all still respects the pinned python version - assert package_is_installed(prefix, 'python=3.8') + assert package_is_installed(prefix, "python=3.8") # pycosat should be in the SUPERSEDED list # after the 4.4 solver work, looks like it's in the DOWNGRADED list @@ -47,41 +63,52 @@ def test_channel_order_channel_priority_true(self): # # The following packages will be UPDATED to a higher-priority channel: # - installed_str, x = update_stdout.split('UPDATED') - assert re.search(r'pkgs/main::pycosat-0.6.3-py38h[^\s]+ --> conda-forge::pycosat', x) + installed_str, x = update_stdout.split("UPDATED") + assert re.search( + r"pkgs/main::pycosat-0.6.3-py38h[^\s]+ --> conda-forge::pycosat", x + ) # python sys.version should show conda-forge python python_tuple = get_conda_list_tuple(prefix, "python") - assert python_tuple[3] == 'conda-forge' + assert python_tuple[3] == "conda-forge" # conda list should show pycosat coming from conda-forge pycosat_tuple = get_conda_list_tuple(prefix, "pycosat") - assert pycosat_tuple[3] == 'conda-forge' + assert pycosat_tuple[3] == "conda-forge" def test_channel_priority_update(self): """ - This case will fail now + This case will fail now """ with make_temp_env("python=3.8", "pycosat") as prefix: - assert package_is_installed(prefix, 'python') + assert package_is_installed(prefix, "python") # clear channels config first to not assume default is defaults - payload, _, _ = run_command(Commands.CONFIG, prefix, "--get", "channels", "--json") + payload, _, _ = run_command( + Commands.CONFIG, prefix, "--get", "channels", "--json" + ) default_channels = json_loads(payload)["get"].get("channels") if default_channels: run_command(Commands.CONFIG, prefix, "--remove-key", "channels") # add conda-forge channel - o, e, _ = run_command(Commands.CONFIG, prefix, "--prepend", "channels", "conda-forge", '--json') - assert context.channels == ("conda-forge", "defaults"), o+e + o, e, _ = run_command( + Commands.CONFIG, + prefix, + "--prepend", + 
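
# [editor's note] For orientation, the run_command(...) calls in this test are
# equivalent to driving the CLI directly (a sketch; assumes a clean config):
#
#   conda config --remove-key channels           # drop any configured channels
#   conda config --prepend channels conda-forge  # give conda-forge top priority
#
# after which the test asserts context.channels == ("conda-forge", "defaults"),
# i.e. the prepended channel outranks defaults.
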
"channels", + "conda-forge", + "--json", + ) + assert context.channels == ("conda-forge", "defaults"), o + e # update python - update_stdout, _, _ = run_command(Commands.UPDATE, prefix, 'python') + update_stdout, _, _ = run_command(Commands.UPDATE, prefix, "python") # pycosat should be in the SUPERSEDED list - superceded_split = update_stdout.split('UPDATED') + superceded_split = update_stdout.split("UPDATED") assert len(superceded_split) == 2 - assert 'conda-forge' in superceded_split[1] + assert "conda-forge" in superceded_split[1] # python sys.version should show conda-forge python python_tuple = get_conda_list_tuple(prefix, "python") - assert python_tuple[3] == 'conda-forge' + assert python_tuple[3] == "conda-forge" diff --git a/tests/test_solvers.py b/tests/test_solvers.py index 5964b59e266..a74564df8fe 100644 --- a/tests/test_solvers.py +++ b/tests/test_solvers.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - from conda.core.solve import Solver from conda.testing.solver_helpers import SolverTests diff --git a/tests/test_toposort.py b/tests/test_toposort.py index ec91d29fd79..4027caf6bc1 100644 --- a/tests/test_toposort.py +++ b/tests/test_toposort.py @@ -1,49 +1,47 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import unittest from conda.common.toposort import pop_key, toposort class TopoSortTests(unittest.TestCase): - def test_pop_key(self): - key = pop_key({'a':{'b', 'c'}, 'b':{'c'}}) - self.assertEqual(key, 'b') + key = pop_key({"a": {"b", "c"}, "b": {"c"}}) + self.assertEqual(key, "b") - key = pop_key({'a':{'b'}, 'b':{'c', 'a'}}) - self.assertEqual(key, 'a') + key = pop_key({"a": {"b"}, "b": {"c", "a"}}) + self.assertEqual(key, "a") - key = pop_key({'a':{'b'}, 'b':{'a'}}) - self.assertEqual(key, 'a') + key = pop_key({"a": {"b"}, "b": {"a"}}) + self.assertEqual(key, "a") def test_simple(self): - data = {'a':'bc', 'b':'c'} + data = {"a": "bc", "b": "c"} results = toposort(data, safe=True) - self.assertEqual(results, ['c', 'b', 'a']) + self.assertEqual(results, ["c", "b", "a"]) results = toposort(data, safe=False) - self.assertEqual(results, ['c', 'b', 'a']) + self.assertEqual(results, ["c", "b", "a"]) def test_cycle(self): - data = {'a':'b', 'b':'a'} + data = {"a": "b", "b": "a"} with self.assertRaises(ValueError): toposort(data, False) results = toposort(data) # Results do not have an guaranteed order - self.assertEqual(set(results), {'b', 'a'}) + self.assertEqual(set(results), {"b", "a"}) def test_cycle_best_effort(self): - data = {'a':'bc', 'b':'c', '1':'2', '2':'1'} + data = {"a": "bc", "b": "c", "1": "2", "2": "1"} results = toposort(data) - self.assertEqual(results[:3], ['c', 'b', 'a']) + self.assertEqual(results[:3], ["c", "b", "a"]) # Cycles come last # Results do not have an guaranteed order - self.assertEqual(set(results[3:]), {'1', '2'}) + self.assertEqual(set(results[3:]), {"1", "2"}) def test_python_is_prioritized(self): """ @@ -56,33 +54,40 @@ def test_python_is_prioritized(self): isn't installed too late. Here, we verify that it works. 
""" # This is the actual dependency graph for python (as of the time of this writing, anyway) - data = {'python' : ['pip', 'openssl', 'readline', 'sqlite', 'tk', 'xz', 'zlib'], - 'pip': ['python', 'setuptools', 'wheel'], - 'setuptools' : ['python'], - 'wheel' : ['python'], - 'openssl' : [], - 'readline' : [], - 'sqlite' : [], - 'tk' : [], - 'xz' : [], - 'zlib' : []} + data = { + "python": ["pip", "openssl", "readline", "sqlite", "tk", "xz", "zlib"], + "pip": ["python", "setuptools", "wheel"], + "setuptools": ["python"], + "wheel": ["python"], + "openssl": [], + "readline": [], + "sqlite": [], + "tk": [], + "xz": [], + "zlib": [], + } # Here are some extra pure-python libs, just for good measure. - data.update({'psutil' : ['python'], - 'greenlet' : ['python'], - 'futures' : ['python'], - 'six' : ['python']}) + data.update( + { + "psutil": ["python"], + "greenlet": ["python"], + "futures": ["python"], + "six": ["python"], + } + ) results = toposort(data) # Python always comes before things that need it! - self.assertLess(results.index('python'), results.index('setuptools')) - self.assertLess(results.index('python'), results.index('wheel')) - self.assertLess(results.index('python'), results.index('pip')) - self.assertLess(results.index('python'), results.index('psutil')) - self.assertLess(results.index('python'), results.index('greenlet')) - self.assertLess(results.index('python'), results.index('futures')) - self.assertLess(results.index('python'), results.index('six')) + self.assertLess(results.index("python"), results.index("setuptools")) + self.assertLess(results.index("python"), results.index("wheel")) + self.assertLess(results.index("python"), results.index("pip")) + self.assertLess(results.index("python"), results.index("psutil")) + self.assertLess(results.index("python"), results.index("greenlet")) + self.assertLess(results.index("python"), results.index("futures")) + self.assertLess(results.index("python"), results.index("six")) + def test_degenerate(): """ @@ -91,5 +96,6 @@ def test_degenerate(): assert toposort({}) == [] assert toposort({}, safe=False) == [] -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tests/test_utils.py b/tests/test_utils.py index accaa21a0de..4f71fac8f5e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,5 @@ # Copyright (C) 2012 Anaconda, Inc # SPDX-License-Identifier: BSD-3-Clause - import sys from logging import getLogger from os import environ, pathsep @@ -8,16 +7,13 @@ from pathlib import Path from unittest.mock import patch -from conda import utils, CondaError -from conda.common.path import win_path_to_unix -from conda.testing.helpers import assert_equals +import pytest +from conda import CondaError, utils from conda.activate import CmdExeActivator, PosixActivator -from conda.common.path import which from conda.common.compat import on_win - -import pytest - +from conda.common.path import which, win_path_to_unix +from conda.testing.helpers import assert_equals SOME_PREFIX = "/some/prefix" SOME_FILES = ["a", "b", "c"] @@ -26,23 +22,28 @@ def test_path_translations(): paths = [ - (r"z:\miniconda\Scripts\pip.exe", - "/z/miniconda/Scripts/pip.exe", - "/cygdrive/z/miniconda/Scripts/pip.exe"), - (r"z:\miniconda;z:\Documents (x86)\pip.exe;c:\test", - "/z/miniconda:/z/Documents (x86)/pip.exe:/c/test", - "/cygdrive/z/miniconda:/cygdrive/z/Documents (x86)/pip.exe:/cygdrive/c/test"), + ( + r"z:\miniconda\Scripts\pip.exe", + "/z/miniconda/Scripts/pip.exe", + "/cygdrive/z/miniconda/Scripts/pip.exe", + ), + ( + 
r"z:\miniconda;z:\Documents (x86)\pip.exe;c:\test", + "/z/miniconda:/z/Documents (x86)/pip.exe:/c/test", + "/cygdrive/z/miniconda:/cygdrive/z/Documents (x86)/pip.exe:/cygdrive/c/test", + ), # Failures: # (r"z:\miniconda\Scripts\pip.exe", # "/z/miniconda/Scripts/pip.exe", # "/cygdrive/z/miniconda/Scripts/pip.exe"), - # ("z:\\miniconda\\", # "/z/miniconda/", # "/cygdrive/z/miniconda/"), - ("test dummy text /usr/bin;z:\\documents (x86)\\code\\conda\\tests\\envskhkzts\\test1;z:\\documents\\code\\conda\\tests\\envskhkzts\\test1\\cmd more dummy text", - "test dummy text /usr/bin:/z/documents (x86)/code/conda/tests/envskhkzts/test1:/z/documents/code/conda/tests/envskhkzts/test1/cmd more dummy text", - "test dummy text /usr/bin:/cygdrive/z/documents (x86)/code/conda/tests/envskhkzts/test1:/cygdrive/z/documents/code/conda/tests/envskhkzts/test1/cmd more dummy text"), + ( + "test dummy text /usr/bin;z:\\documents (x86)\\code\\conda\\tests\\envskhkzts\\test1;z:\\documents\\code\\conda\\tests\\envskhkzts\\test1\\cmd more dummy text", + "test dummy text /usr/bin:/z/documents (x86)/code/conda/tests/envskhkzts/test1:/z/documents/code/conda/tests/envskhkzts/test1/cmd more dummy text", + "test dummy text /usr/bin:/cygdrive/z/documents (x86)/code/conda/tests/envskhkzts/test1:/cygdrive/z/documents/code/conda/tests/envskhkzts/test1/cmd more dummy text", + ), ] for windows_path, unix_path, cygwin_path in paths: assert win_path_to_unix(windows_path) == unix_path @@ -60,20 +61,20 @@ def test_text_translations(): def get_conda_prefixes_on_PATH(): - ''' + """ :return: A tuple of: A list of conda prefixes found on PATH in the order in which they appear. A list of the suffixes that determine a conda prefix on this platform. - ''' + """ if on_win: - condapathlist = list(CmdExeActivator()._get_path_dirs('')) + condapathlist = list(CmdExeActivator()._get_path_dirs("")) else: - condapathlist = list(PosixActivator()._get_path_dirs('')) - pathlist=environ.get('PATH', '').split(pathsep) - pathlist=pathlist+pathlist + condapathlist = list(PosixActivator()._get_path_dirs("")) + pathlist = environ.get("PATH", "").split(pathsep) + pathlist = pathlist + pathlist conda_prefixes = [] - for pei, _ in enumerate(pathlist[:-len(condapathlist)]): + for pei, _ in enumerate(pathlist[: -len(condapathlist)]): all_good = True for cei, ce in enumerate(condapathlist): if not pathlist[pei + cei].endswith(ce): @@ -81,12 +82,12 @@ def get_conda_prefixes_on_PATH(): break if not all_good: continue - conda_prefixes.append(pathlist[pei][-len(condapathlist[0]):]) + conda_prefixes.append(pathlist[pei][-len(condapathlist[0]) :]) return conda_prefixes, condapathlist def get_prefix_containing_test_programs(test_programs=()): - ''' + """ This function returns the conda prefix of test_programs on PATH if: 1. Conda's path entries are found on PATH in the correct order. @@ -95,7 +96,7 @@ def get_prefix_containing_test_programs(test_programs=()): pushed env. and also when expected programs are not installed. It also detects mixed scenarios where different programs come from different prefixes which is never what we want. 
- ''' + """ prefixes, suffixes = get_conda_prefixes_on_PATH() for test_program in test_programs: @@ -114,10 +115,16 @@ def get_prefix_containing_test_programs(test_programs=()): found = True break if not found: - log.warning("{} not found in any conda prefixes ({}) on PATH", test_program, prefixes) + log.warning( + "{} not found in any conda prefixes ({}) on PATH", + test_program, + prefixes, + ) return None - if len(set(test_program_in_prefix))!=1: - log.warning(f"test_programs ({test_programs}) not all found in the same prefix") + if len(set(test_program_in_prefix)) != 1: + log.warning( + f"test_programs ({test_programs}) not all found in the same prefix" + ) return None return prefixes[test_program_in_prefix[0]] return prefixes[0] if prefixes else None @@ -137,6 +144,7 @@ def is_prefix_activated_PATHwise(prefix=sys.prefix, test_programs=()): _win_quotes = '"{}"'.format _quotes = _win_quotes if on_win else _posix_quotes + @pytest.mark.parametrize( ["args", "expected"], [