Commit

Merge remote-tracking branch 'origin/4.4.x' into refactor-exception-handler

kalefranz committed Jun 26, 2017
2 parents c7df021 + 1b66365 commit 5ba7589
Showing 28 changed files with 291 additions and 288 deletions.
8 changes: 6 additions & 2 deletions CHANGELOG.md
@@ -6,8 +6,8 @@
* resolve #5160 conda xontrib plugin (#5157)
* resolve #1543 add support and tests for --no-deps and --only-deps (#5265)
* resolve #988 allow channel name to be part of the package name spec (#5365)
* resolve #5530 add ability for users to choose to post unexpected errors to core maintainers (#5531)
* Solver, UI, History, and Other (#5546)
* resolve #5530 add ability for users to choose to post unexpected errors to core maintainers (#5531, #5571)
* Solver, UI, History, and Other (#5546, #5583)

### Deprecations/Breaking Changes
* remove support for with_features_depends (#5191)
@@ -39,6 +39,7 @@
* resolve #5470 make stdout/stderr capture in python_api customizable (#5471)
* logging simplifications/improvements (#5547)
* update license information (#5568)
* enable threadpool use for repodata collection by default (#5546, #5587)

### Bug Fixes
* fix some conda-build compatibility issues (#5089)
@@ -80,6 +81,9 @@

## 4.3.23 (unreleased)

### Improvements
* resolve #5391 PackageNotFound and NoPackagesFoundError clean up (#5506)

### Bug Fixes
* fix #5525 too many Nones in CondaHttpError (#5526)
* fix #5508 assertion failure after test file not cleaned up (#5533)
92 changes: 25 additions & 67 deletions conda/cli/install.py
@@ -6,7 +6,6 @@

from __future__ import absolute_import, division, print_function, unicode_literals

from difflib import get_close_matches
from logging import getLogger
import os
from os.path import abspath, basename, exists, isdir, join
@@ -17,19 +16,19 @@
from ..base.context import context
from ..common.compat import text_type
from ..core.envs_manager import EnvsDirectory
from ..core.index import get_index
from ..core.index import get_channel_priority_map, get_index
from ..core.linked_data import linked as install_linked
from ..core.solve import Solver, get_pinned_specs
from ..exceptions import (CondaImportError, CondaOSError, CondaSystemExit,
CondaValueError, DirectoryNotFoundError, DryRunExit,
EnvironmentLocationNotFound, NoPackagesFoundError, PackageNotFoundError,
PackageNotInstalledError, TooManyArgumentsError,
from ..core.solve import Solver
from ..exceptions import (CondaImportError, CondaOSError, CondaSystemExit, CondaValueError,
DirectoryNotFoundError, DryRunExit, EnvironmentLocationNotFound,
PackageNotFoundError, PackageNotInstalledError, TooManyArgumentsError,
UnsatisfiableError)
from ..misc import append_env, clone_env, explicit, touch_nonadmin
from ..plan import revert_actions
from ..plan import (revert_actions)
from ..resolve import ResolvePackageNotFound, dashlist

log = getLogger(__name__)
stderr = getLogger('stderr')
stderrlog = getLogger('conda.stderr')


def check_prefix(prefix, json=False):
@@ -46,9 +45,9 @@ def check_prefix(prefix, json=False):
raise CondaValueError(error, json)

if ' ' in prefix:
stderr.warn("WARNING: A space was detected in your requested environment path\n"
"'%s'\n"
"Spaces in paths can sometimes be problematic." % prefix)
stderrlog.warn("WARNING: A space was detected in your requested environment path\n"
"'%s'\n"
"Spaces in paths can sometimes be problematic." % prefix)


def clone(src_arg, dst_prefix, json=False, quiet=False, index_args=None):
@@ -221,61 +220,20 @@ def install(args, parser, command='install'):
force_reinstall=context.force,
)
progressive_fetch_extract = unlink_link_transaction.get_pfe()
except NoPackagesFoundError as e:
error_message = [e.args[0]]

if isupdate and args.all:
# Packages not found here just means they were installed but
# cannot be found any more. Just skip them.
if not context.json:
print("Warning: %s, skipping" % error_message)
else:
# Not sure what to do here
pass
args._skip = getattr(args, '_skip', ['anaconda'])
for pkg in e.pkgs:
p = pkg.split()[0]
if p in args._skip:
# Avoid infinite recursion. This can happen if a spec
# comes from elsewhere, like --file
raise
args._skip.append(p)

return install(args, parser, command=command)
else:
packages = {index[fn]['name'] for fn in index}

nfound = 0
for pkg in sorted(e.pkgs, key=lambda x: x.name):
pkg = pkg.name
if pkg in packages:
continue
close = get_close_matches(pkg, packages, cutoff=0.7)
if not close:
continue
if nfound == 0:
error_message.append("\n\nClose matches found; did you mean one of these?\n")
error_message.append("\n %s: %s" % (pkg, ', '.join(close)))
nfound += 1
# error_message.append('\n\nYou can search for packages on anaconda.org with')
# error_message.append('\n\n anaconda search -t conda %s' % pkg)
if len(e.pkgs) > 1:
# Note this currently only happens with dependencies not found
error_message.append('\n\n(and similarly for the other packages)')

# if not find_executable('anaconda', include_others=False):
# error_message.append('\n\nYou may need to install the anaconda-client')
# error_message.append(' command line client with')
# error_message.append('\n\n conda install anaconda-client')

pinned_specs = get_pinned_specs(prefix)
if pinned_specs:
path = join(prefix, 'conda-meta', 'pinned')
error_message.append("\n\nNote that you have pinned specs in %s:" % path)
error_message.append("\n\n %r" % (pinned_specs,))

error_message = ''.join(error_message)
raise PackageNotFoundError(error_message)

except ResolvePackageNotFound as e:
pkg = e.bad_deps
pkg = dashlist(' -> '.join(map(str, q)) for q in pkg)
channel_priority_map = get_channel_priority_map(
channel_urls=index_args['channel_urls'],
prepend=index_args['prepend'],
platform=None,
use_local=index_args['use_local'],
)

channels_urls = tuple(channel_priority_map)

raise PackageNotFoundError(pkg, channels_urls)

except (UnsatisfiableError, SystemExit) as e:
# Unsatisfiable package specifications/no such revision/import error
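
The install.py hunk above is the core of the refactor: the long NoPackagesFoundError branch that assembled close-match suggestions and pinned-spec notes is gone, and the resolver's low-level ResolvePackageNotFound is instead converted into a user-facing PackageNotFoundError that reports which channels were actually searched. A minimal, self-contained sketch of that conversion pattern follows; the exception classes and the dashlist helper here are simplified stand-ins for illustration, not the real conda implementations.

def dashlist(iterable):
    # simplified stand-in for conda.resolve.dashlist: render items as a dash list
    return ''.join('\n  - %s' % item for item in iterable)


class ResolvePackageNotFound(Exception):
    # low-level resolver error; bad_deps holds the unsatisfiable dependency chains
    def __init__(self, bad_deps):
        self.bad_deps = bad_deps
        super(ResolvePackageNotFound, self).__init__(bad_deps)


class PackageNotFoundError(Exception):
    # user-facing error that also names the channels that were searched
    def __init__(self, packages, channel_urls):
        message = ("Packages missing in current channels:\n%s\n\n"
                   "We searched the following channels:%s"
                   % (packages, dashlist(channel_urls)))
        super(PackageNotFoundError, self).__init__(message)


def solve(specs):
    # stand-in for the solver call; always fails for the demo
    raise ResolvePackageNotFound([(spec,) for spec in specs])


channels_urls = ('https://conda.example/main/linux-64',   # illustrative URLs only
                 'https://conda.example/main/noarch')
try:
    solve(['does-not-exist'])
except ResolvePackageNotFound as e:
    # same shape as the new handler above: flatten each dependency chain,
    # then re-raise with the channel context attached
    pkg = dashlist(' -> '.join(map(str, q)) for q in e.bad_deps)
    raise PackageNotFoundError(pkg, channels_urls)
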
2 changes: 1 addition & 1 deletion conda/cli/main.py
@@ -113,7 +113,7 @@ def init_loggers(context=None):
initialize_logging()
if context and context.json:
# Silence logging info to avoid interfering with JSON output
for logger in ('print', 'stdoutlog', 'stderrlog'):
for logger in ('conda.stdout.verbose', 'conda.stdoutlog', 'conda.stderrlog'):
getLogger(logger).setLevel(CRITICAL + 1)

if context and context.verbosity:
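
The main.py change follows the logger renaming elsewhere in this branch: the loggers now live under the 'conda.' namespace, so JSON mode silences 'conda.stdout.verbose', 'conda.stdoutlog', and 'conda.stderrlog' instead of the old bare names. A small self-contained illustration of the silencing technique (logger names taken from the diff; everything else is illustrative):

import logging

logging.basicConfig(level=logging.DEBUG)


def init_loggers_for_json():
    # setting a logger's level above CRITICAL drops every record it emits,
    # which keeps stdout clean for the JSON payload
    for name in ('conda.stdout.verbose', 'conda.stdoutlog', 'conda.stderrlog'):
        logging.getLogger(name).setLevel(logging.CRITICAL + 1)


init_loggers_for_json()
logging.getLogger('conda.stderrlog').critical("suppressed")        # not emitted
logging.getLogger('conda.other').warning("unrelated loggers still emit")
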
35 changes: 28 additions & 7 deletions conda/cli/main_search.py
@@ -107,14 +107,33 @@ def configure_parser(sub_parsers):


def execute(args, parser):
from ..common.compat import text_type
from ..exceptions import NoPackagesFoundError, PackageNotFoundError
from ..base.context import context
from ..core.index import get_channel_priority_map
from ..exceptions import PackageNotFoundError, ResolvePackageNotFound
from ..resolve import dashlist

try:
execute_search(args, parser)
except NoPackagesFoundError as e:
error_message = text_type(e)
raise PackageNotFoundError(error_message)
except ResolvePackageNotFound as e:
pkg = []
pkg.append(e.bad_deps)
pkg = dashlist(pkg)
index_args = {
'channel_urls': context.channels,
'prepend': not args.override_channels,
'use_local': args.use_local,
}

channel_priority_map = get_channel_priority_map(
channel_urls=index_args['channel_urls'],
prepend=index_args['prepend'],
platform=None,
use_local=index_args['use_local'],
)

channels_urls = tuple(channel_priority_map)

raise PackageNotFoundError(pkg, channels_urls)


def make_icon_url(info): # pragma: no cover
@@ -130,7 +149,7 @@ def execute_search(args, parser):
import re
from .common import (arg2spec, disp_features, ensure_override_channels_requires_channel,
ensure_use_local, stdout_json)
from ..resolve import Resolve
from ..resolve import Resolve, ResolvePackageNotFound
from ..core.index import get_index
from ..models.match_spec import MatchSpec
from ..core.linked_data import linked as linked_data
@@ -178,7 +197,6 @@ def execute_search(args, parser):
unknown=args.unknown)

r = Resolve(index)

if args.canonical:
json = []
else:
@@ -200,6 +218,9 @@
if res:
names.append((name, res))

if not names:
raise ResolvePackageNotFound(args.regex)

for name, pkgs in names:
disp_name = name

3 changes: 1 addition & 2 deletions conda/cli/python_api.py
@@ -10,9 +10,8 @@
from ..common.io import CaptureTarget, argv, captured
from ..common.path import win_path_double_escape
from ..exceptions import conda_exception_handler
from ..gateways import initialize_std_loggers
from ..gateways.logging import initialize_std_loggers

initialize_std_loggers()
log = getLogger(__name__)


19 changes: 12 additions & 7 deletions conda/core/index.py
@@ -22,7 +22,6 @@
from .._vendor.toolz.itertoolz import take # NOQA

log = getLogger(__name__)
stdoutlog = getLogger('stdoutlog')


def get_index(channel_urls=(), prepend=True, platform=None,
@@ -34,15 +33,10 @@ def get_index(channel_urls=(), prepend=True, platform=None,
If platform=None, then the current platform is used.
If prefix is supplied, then the packages installed in that prefix are added.
"""
if use_local:
channel_urls = ['local'] + list(channel_urls)
if prepend:
channel_urls += context.channels
if context.offline and unknown is None:
unknown = True

subdirs = (platform, 'noarch') if platform is not None else context.subdirs
channel_priority_map = prioritize_channels(channel_urls, subdirs=subdirs)
channel_priority_map = get_channel_priority_map(channel_urls, prepend, platform, use_local)
index = fetch_index(channel_priority_map, use_cache=use_cache)

if prefix or unknown:
@@ -127,5 +121,16 @@ def _supplement_index_with_features(index, features=()):
index[Dist(rec)] = rec


def get_channel_priority_map(channel_urls=(), prepend=True, platform=None, use_local=False):
if use_local:
channel_urls = ['local'] + list(channel_urls)
if prepend:
channel_urls += context.channels

subdirs = (platform, 'noarch') if platform is not None else context.subdirs
channel_priority_map = prioritize_channels(channel_urls, subdirs=subdirs)
return channel_priority_map


def dist_str_in_index(index, dist_str):
return Dist(dist_str) in index
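
Factoring get_channel_priority_map() out of get_index() lets the error paths in install.py and main_search.py report the searched channels without fetching a full index. A hedged usage sketch against the 4.4.x internals shown in this diff (it only runs inside a conda checkout at this revision; the argument values are illustrative):

from conda.core.index import get_channel_priority_map

# keys of the returned mapping are the fully resolved channel URLs
channel_priority_map = get_channel_priority_map(
    channel_urls=('conda-forge',),   # channels requested on the command line
    prepend=True,                    # also include context.channels
    platform=None,                   # None -> use context.subdirs for this machine
    use_local=False,                 # True would prepend the 'local' build channel
)

# this tuple is what the new PackageNotFoundError handlers pass along
channels_urls = tuple(channel_priority_map)
print(channels_urls)
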
17 changes: 7 additions & 10 deletions conda/core/link.py
@@ -8,7 +8,7 @@
from subprocess import CalledProcessError
import sys
from tempfile import mkdtemp
from traceback import format_exc
from traceback import format_exception_only
import warnings

from .linked_data import PrefixData, get_python_version_for_prefix, linked_data as get_linked_data
@@ -317,8 +317,8 @@ def _verify_individual_level(prefix_action_group):
continue
error_result = axn.verify()
if error_result:
log.debug("Verification error in action %s", axn)
log.debug(format_exc())
formatted_error = ''.join(format_exception_only(type(error_result), error_result))
log.debug("Verification error in action %s\n%s", axn, formatted_error)
yield error_result

@staticmethod
@@ -531,8 +531,8 @@ def _execute_actions(pkg_idx, axngroup):
run_script(target_prefix, prec, 'post-unlink' if is_unlink else 'post-link')
except Exception as e: # this won't be a multi error
# reverse this package
log.debug("Error in action #%d for pkg_idx #%d %r", axn_idx, pkg_idx, action)
log.debug(format_exc())
log.debug("Error in action #%d for pkg_idx #%d %r", axn_idx, pkg_idx, action,
exc_info=True)
reverse_excs = ()
if context.rollback_enabled:
# log.error("An error occurred while %s package '%s'.\n"
@@ -574,8 +574,7 @@ def _reverse_actions(pkg_idx, axngroup, reverse_from_idx=-1):
action.reverse()
except Exception as e:
log.debug("action.reverse() error in action #%d for pkg_idx #%d %r", axn_idx,
pkg_idx, action)
log.debug(format_exc())
pkg_idx, action, exc_info=True)
exceptions.append(e)
return exceptions

@@ -728,9 +727,7 @@ def run_script(prefix, prec, action='post-link', env_prefix=None):
or "$PREFIX/bin/python $SOURCE_DIR/link.py" in script_text):
is_old_noarch = True
except Exception as e:
import traceback
log.debug(e)
log.debug(traceback.format_exc())
log.debug(e, exc_info=True)

env['SOURCE_DIR'] = prefix
if not is_old_noarch:
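
The link.py edits replace manual traceback.format_exc() calls with logging's exc_info=True, and use traceback.format_exception_only() for the verification case, where an exception object is returned rather than raised (so there is no active traceback to capture). A short self-contained illustration of both idioms; the action name and errors are made up for the example:

import logging
from traceback import format_exception_only

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)


def verify():
    # mimics axn.verify() above: the error comes back as a return value
    return ValueError("illustrative verification failure")


error_result = verify()
if error_result:
    # nothing was raised, so only the exception type and message can be rendered
    formatted_error = ''.join(format_exception_only(type(error_result), error_result))
    log.debug("Verification error in action %s\n%s", "ExampleAction", formatted_error)

try:
    raise RuntimeError("illustrative post-link failure")
except RuntimeError:
    # exc_info=True makes the logging module capture and format the active
    # traceback itself, replacing log.debug(format_exc())
    log.debug("Error in action #%d for pkg_idx #%d", 0, 0, exc_info=True)
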
1 change: 0 additions & 1 deletion conda/core/package_cache.py
@@ -36,7 +36,6 @@


log = getLogger(__name__)
stderrlog = getLogger('stderrlog')


class PackageCacheType(type):
10 changes: 4 additions & 6 deletions conda/core/repodata.py
@@ -49,7 +49,7 @@
__all__ = ('RepoData',)

log = getLogger(__name__)
stderrlog = getLogger('stderrlog')
stderrlog = getLogger('conda.stderrlog')

REPODATA_PICKLE_VERSION = 3
REPODATA_HEADER_RE = b'"(_etag|_mod|_cache_control)":[ ]?"(.*)"'
@@ -386,8 +386,7 @@ def write_pickled_repodata(cache_path, repodata):
with open(get_pickle_path(cache_path), 'wb') as f:
pickle.dump(repodata, f)
except Exception as e:
import traceback
log.debug("Failed to dump pickled repodata.\n%s", traceback.format_exc())
log.debug("Failed to dump pickled repodata.", exc_info=True)


def read_pickled_repodata(cache_path, channel_url, schannel, priority, etag, mod_stamp):
@@ -401,8 +400,7 @@ def read_pickled_repodata(cache_path, channel_url, schannel, priority, etag, mod
with open(pickle_path, 'rb') as f:
repodata = pickle.load(f)
except Exception as e:
import traceback
log.debug("Failed to load pickled repodata.\n%s", traceback.format_exc())
log.debug("Failed to load pickled repodata.", exc_info=True)
rm_rf(pickle_path)
return None

Expand Down Expand Up @@ -587,7 +585,7 @@ def collect_all_repodata_as_index(use_cache, tasks):
if context.concurrent:
try:
from concurrent.futures import ThreadPoolExecutor
executor = ThreadPoolExecutor(5)
executor = ThreadPoolExecutor(10)
index = _collect_repodatas_concurrent_as_index(executor, use_cache, tasks)
except (ImportError, RuntimeError) as e:
# concurrent.futures is only available in Python >= 3.2 or if futures is installed
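
Besides renaming the stderr logger, the repodata change grows the default thread pool from 5 to 10 workers, and the CHANGELOG entry above notes that threaded repodata collection is now on by default, with the serial path kept as a fallback when concurrent.futures is unavailable (Python 2 without the futures backport) or the executor cannot start. A simplified, self-contained sketch of that fetch-with-fallback shape; fetch_one and the URLs are illustrative stand-ins, not conda's real fetch code:

def fetch_one(task):
    # stand-in for fetching and parsing one channel's repodata.json
    url, priority = task
    return url, {'priority': priority, 'packages': {}}


def collect_all_repodata(tasks, concurrent=True):
    repodatas = None
    if concurrent:
        try:
            from concurrent.futures import ThreadPoolExecutor
            executor = ThreadPoolExecutor(10)
            try:
                repodatas = list(executor.map(fetch_one, tasks))
            finally:
                executor.shutdown(wait=True)
        except (ImportError, RuntimeError):
            # concurrent.futures needs Python >= 3.2 or the 'futures' backport;
            # RuntimeError covers environments where new threads cannot start
            repodatas = None
    if repodatas is None:
        repodatas = [fetch_one(task) for task in tasks]   # serial fallback
    return dict(repodatas)


tasks = [('https://conda.example/main/linux-64', 1),
         ('https://conda.example/main/noarch', 2)]
print(collect_all_repodata(tasks))
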