Fail with a readable error message if PackageCacheRecords are missing (conda#11591)

* Fail with a readable warning if PackageCacheRecords are missing

* Minor fixes to error message.

This also doesn’t use cont.io for the tests.

* More test fixes and minor code cleanup.

* Update news/11591-readable-error-message-if-packagecacherecords-missing

Co-authored-by: Jannis Leidel <[email protected]>
Co-authored-by: Ken Odegard <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
4 people authored Sep 29, 2022
1 parent a8ba678 commit eb3e6a7
Showing 3 changed files with 189 additions and 111 deletions.
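For orientation before the diff: the heart of this change is in `conda/misc.py`'s `explicit()` function, where a bare `assert` is replaced by a check that names every spec whose PackageCacheRecord is missing. The following is a minimal standalone sketch of that logic, not the exact conda code: `lookup_cache_record` is a hypothetical stand-in for `PackageCacheData.query_all(spec)`, the filenames in the demo are made up, and only the message strings mirror the ones added in the diff below.

```python
from typing import Callable, Iterable, Optional


def check_cache_records(
    specs: Iterable[str],
    lookup_cache_record: Callable[[str], Optional[object]],
) -> list:
    """Pair each spec with its cache record and fail readably if any are missing.

    ``lookup_cache_record`` stands in for conda's ``PackageCacheData.query_all``
    lookup; it should return ``None`` when no record exists for the spec.
    """
    specs_pcrecs = [(spec, lookup_cache_record(spec)) for spec in specs]

    # Collect the specs that have no cache record instead of asserting blindly.
    missing = [spec for spec, pcrec in specs_pcrecs if pcrec is None]
    if missing:
        if len(missing) == len(specs_pcrecs):
            raise AssertionError("No package cache records found")
        raise AssertionError(
            f"Missing package cache records for: {', '.join(missing)}"
        )
    return specs_pcrecs


if __name__ == "__main__":
    # Toy cache with one "record"; the second (made-up) spec is deliberately absent.
    cache = {"numpy-1.23.3-py310.tar.bz2": object()}
    try:
        check_cache_records(
            ["numpy-1.23.3-py310.tar.bz2", "scipy-1.9.1-py310.tar.bz2"],
            cache.get,
        )
    except AssertionError as exc:
        print(exc)  # Missing package cache records for: scipy-1.9.1-py310.tar.bz2
```

Collecting the missing specs first costs nothing extra and lets the error distinguish "nothing was cached at all" from "these particular packages were not cached".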
146 changes: 93 additions & 53 deletions conda/misc.py
@@ -22,8 +22,11 @@
from .core.package_cache_data import PackageCacheData, ProgressiveFetchExtract
from .core.prefix_data import PrefixData
from .exceptions import (
DisallowedPackageError, DryRunExit, PackagesNotFoundError,
ParseError, CondaExitZero
DisallowedPackageError,
DryRunExit,
PackagesNotFoundError,
ParseError,
CondaExitZero,
)
from .gateways.disk.delete import rm_rf
from .gateways.disk.link import islink, readlink, symlink
@@ -39,41 +42,45 @@ def conda_installed_files(prefix, exclude_self_build=False):
"""
res = set()
for meta in PrefixData(prefix).iter_records():
if exclude_self_build and 'file_hash' in meta:
if exclude_self_build and "file_hash" in meta:
continue
res.update(set(meta.get('files', ())))
res.update(set(meta.get("files", ())))
return res


url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
r'(?P<fn>[^/\\#]+(?:\.tar\.bz2|\.conda))'
r'(:?#(?P<md5>[0-9a-f]{32}))?$')
url_pat = re.compile(
r"(?:(?P<url_p>.+)(?:[/\\]))?"
r"(?P<fn>[^/\\#]+(?:\.tar\.bz2|\.conda))"
r"(:?#(?P<md5>[0-9a-f]{32}))?$"
)


def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None, index=None):
actions = defaultdict(list)
actions['PREFIX'] = prefix
actions["PREFIX"] = prefix

fetch_specs = []
for spec in specs:
if spec == '@EXPLICIT':
if spec == "@EXPLICIT":
continue

if not is_url(spec):
'''
"""
# This does not work because url_to_path does not enforce Windows
# backslashes. Should it? Seems like a dangerous change to make but
# it would be cleaner.
expanded = expand(spec)
urled = path_to_url(expanded)
pathed = url_to_path(urled)
assert pathed == expanded
'''
"""
spec = path_to_url(expand(spec))

# parse URL
m = url_pat.match(spec)
if m is None:
raise ParseError('Could not parse explicit URL: %s' % spec)
url_p, fn, md5sum = m.group('url_p'), m.group('fn'), m.group('md5')
raise ParseError("Could not parse explicit URL: %s" % spec)
url_p, fn, md5sum = m.group("url_p"), m.group("fn"), m.group("md5")
url = join_url(url_p, fn)
# url_p is everything but the tarball_basename and the md5sum

@@ -86,14 +93,25 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None,
pfe.execute()

if context.download_only:
raise CondaExitZero('Package caches prepared. '
'UnlinkLinkTransaction cancelled with --download-only option.')
raise CondaExitZero(
"Package caches prepared. "
"UnlinkLinkTransaction cancelled with --download-only option."
)

# now make an UnlinkLinkTransaction with the PackageCacheRecords as inputs
# need to add package name to fetch_specs so that history parsing keeps track of them correctly
specs_pcrecs = tuple([spec, next(PackageCacheData.query_all(spec), None)]
for spec in fetch_specs)
assert not any(spec_pcrec[1] is None for spec_pcrec in specs_pcrecs)
specs_pcrecs = tuple(
[spec, next(PackageCacheData.query_all(spec), None)] for spec in fetch_specs
)

# Assert that every spec has a PackageCacheRecord
specs_with_missing_pcrecs = [str(spec) for spec, pcrec in specs_pcrecs if pcrec is None]
if specs_with_missing_pcrecs:
if len(specs_with_missing_pcrecs) == len(specs_pcrecs):
raise AssertionError("No package cache records found")
else:
missing_precs_list = ", ".join(specs_with_missing_pcrecs)
raise AssertionError(f"Missing package cache records for: {missing_precs_list}")

precs_to_remove = []
prefix_data = PrefixData(prefix)
@@ -109,8 +127,14 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None,
else:
precs_to_remove.append(prec)

stp = PrefixSetup(prefix, precs_to_remove, tuple(sp[1] for sp in specs_pcrecs if sp[0]),
(), tuple(sp[0] for sp in specs_pcrecs if sp[0]), ())
stp = PrefixSetup(
prefix,
precs_to_remove,
tuple(sp[1] for sp in specs_pcrecs if sp[0]),
(),
tuple(sp[0] for sp in specs_pcrecs if sp[0]),
(),
)

txn = UnlinkLinkTransaction(stp)
txn.execute()
@@ -119,7 +143,7 @@ def explicit(specs, prefix, verbose=False, force_extract=True, index_args=None,
def rel_path(prefix, path, windows_forward_slashes=True):
res = path[len(prefix) + 1:]
if on_win and windows_forward_slashes:
res = res.replace('\\', '/')
res = res.replace("\\", "/")
return res


@@ -129,20 +153,31 @@ def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=Tr
"""
res = set()
prefix = abspath(prefix)
ignore = {'pkgs', 'envs', 'conda-bld', 'conda-meta', '.conda_lock',
'users', 'LICENSE.txt', 'info', 'conda-recipes', '.index',
'.unionfs', '.nonadmin'}
binignore = {'conda', 'activate', 'deactivate'}
if sys.platform == 'darwin':
ignore.update({'python.app', 'Launcher.app'})
ignore = {
"pkgs",
"envs",
"conda-bld",
"conda-meta",
".conda_lock",
"users",
"LICENSE.txt",
"info",
"conda-recipes",
".index",
".unionfs",
".nonadmin",
}
binignore = {"conda", "activate", "deactivate"}
if sys.platform == "darwin":
ignore.update({"python.app", "Launcher.app"})
for fn in (entry.name for entry in os.scandir(prefix)):
if ignore_predefined_files and fn in ignore:
continue
if isfile(join(prefix, fn)):
res.add(fn)
continue
for root, dirs, files in os.walk(join(prefix, fn)):
should_ignore = ignore_predefined_files and root == join(prefix, 'bin')
should_ignore = ignore_predefined_files and root == join(prefix, "bin")
for fn2 in files:
if should_ignore and fn2 in binignore:
continue
@@ -153,7 +188,7 @@ def walk_prefix(prefix, ignore_predefined_files=True, windows_forward_slashes=Tr
res.add(relpath(path, prefix))

if on_win and windows_forward_slashes:
return {path.replace('\\', '/') for path in res}
return {path.replace("\\", "/") for path in res}
else:
return res

@@ -164,23 +199,27 @@ def untracked(prefix, exclude_self_build=False):
"""
conda_files = conda_installed_files(prefix, exclude_self_build)
return {
path for path in walk_prefix(prefix) - conda_files
path
for path in walk_prefix(prefix) - conda_files
if not (
path.endswith('~')
or sys.platform == 'darwin' and path.endswith('.DS_Store')
or path.endswith('.pyc') and path[:-1] in conda_files
)}
path.endswith("~")
or sys.platform == "darwin"
and path.endswith(".DS_Store")
or path.endswith(".pyc")
and path[:-1] in conda_files
)
}


def touch_nonadmin(prefix):
"""
Creates $PREFIX/.nonadmin if sys.prefix/.nonadmin exists (on Windows)
"""
if on_win and exists(join(context.root_prefix, '.nonadmin')):
if on_win and exists(join(context.root_prefix, ".nonadmin")):
if not isdir(prefix):
os.makedirs(prefix)
with open(join(prefix, '.nonadmin'), 'w') as fo:
fo.write('')
with open(join(prefix, ".nonadmin"), "w") as fo:
fo.write("")


def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
@@ -195,11 +234,11 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
while found:
found = False
for prec in PrefixData(prefix1).iter_records():
name = prec['name']
name = prec["name"]
if name in filter:
continue
if name == 'conda':
filter['conda'] = prec
if name == "conda":
filter["conda"] = prec
found = True
break
if name == "conda-env":
@@ -214,16 +253,16 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
if filter:
if not quiet:
fh = sys.stderr if context.json else sys.stdout
print('The following packages cannot be cloned out of the root environment:', file=fh)
print("The following packages cannot be cloned out of the root environment:", file=fh)
for prec in filter.values():
print(' - ' + prec.dist_str(), file=fh)
drecs = {prec for prec in PrefixData(prefix1).iter_records() if prec['name'] not in filter}
print(" - " + prec.dist_str(), file=fh)
drecs = {prec for prec in PrefixData(prefix1).iter_records() if prec["name"] not in filter}
else:
drecs = {prec for prec in PrefixData(prefix1).iter_records()}

# Resolve URLs for packages that do not have URLs
index = {}
unknowns = [prec for prec in drecs if not prec.get('url')]
unknowns = [prec for prec in drecs if not prec.get("url")]
notfound = []
if unknowns:
index_args = index_args or {}
@@ -246,7 +285,7 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
# Assemble the URL and channel list
urls = {}
for prec in drecs:
urls[prec] = prec['url']
urls[prec] = prec["url"]

precs = tuple(PrefixGraph(urls).graph)
urls = [urls[prec] for prec in precs]
@@ -257,8 +296,8 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
raise DisallowedPackageError(prec)

if verbose:
print('Packages: %d' % len(precs))
print('Files: %d' % len(untracked_files))
print("Packages: %d" % len(precs))
print("Files: %d" % len(untracked_files))

if context.dry_run:
raise DryRunExit()
@@ -276,22 +315,23 @@ def clone_env(prefix1, prefix2, verbose=True, quiet=False, index_args=None):
continue

try:
with open(src, 'rb') as fi:
with open(src, "rb") as fi:
data = fi.read()
except IOError:
continue

try:
s = data.decode('utf-8')
s = data.decode("utf-8")
s = s.replace(prefix1, prefix2)
data = s.encode('utf-8')
data = s.encode("utf-8")
except UnicodeDecodeError: # data is binary
pass

with open(dst, 'wb') as fo:
with open(dst, "wb") as fo:
fo.write(data)
shutil.copystat(src, dst)

actions = explicit(urls, prefix2, verbose=not quiet, index=index,
force_extract=False, index_args=index_args)
actions = explicit(
urls, prefix2, verbose=not quiet, index=index, force_extract=False, index_args=index_args
)
return actions, untracked_files
4 changes: 4 additions & 0 deletions news/11591-readable-error-message-if-packagecacherecords-missing
@@ -0,0 +1,4 @@
### Enhancements

* Added an informative message if explicit install fails due to requested
packages not being in the cache. (#11591)
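
To complement the changelog entry above, here is a hedged illustration of how the more informative failure could be exercised from a test. These are illustrative pytest checks, not the tests this commit actually adds; the helper is condensed from the sketch earlier on this page, and the expected message text comes from the diff.

```python
import pytest


def check_cache_records(specs, lookup):
    """Condensed from the sketch earlier on this page, for self-containment."""
    missing = [spec for spec in specs if lookup(spec) is None]
    if missing:
        if len(missing) == len(specs):
            raise AssertionError("No package cache records found")
        raise AssertionError(f"Missing package cache records for: {', '.join(missing)}")


def test_partially_missing_records_are_named():
    cache = {"present-1.0-0.tar.bz2": object()}
    with pytest.raises(AssertionError, match="Missing package cache records for: absent"):
        check_cache_records(["present-1.0-0.tar.bz2", "absent-2.0-0.tar.bz2"], cache.get)


def test_entirely_missing_records_get_generic_message():
    with pytest.raises(AssertionError, match="No package cache records found"):
        check_cache_records(["absent-2.0-0.tar.bz2"], lambda spec: None)
```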