Make faster
Sophia Castellarin committed Jan 8, 2020
1 parent 77aa2af commit 2a8e46b
Showing 1 changed file with 10 additions and 7 deletions.
17 changes: 10 additions & 7 deletions conda/resolve.py
@@ -406,7 +406,8 @@ def build_graph_of_deps(self, spec):
all_deps.add(new_node)
new_path = list(path)
new_path.append(new_node)
queue.append(new_path)
if len(new_path) <= 3:
queue.append(new_path)
return dep_graph, all_deps
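
The new guard above stops the breadth-first walk from re-queueing any path longer than three nodes, which bounds how much of the dependency graph gets explored. A minimal sketch of that depth-limited BFS, with `get_deps`, the graph shapes, and `max_depth` as stand-ins rather than conda's actual API:

```python
from collections import deque

def build_graph_of_deps(root, get_deps, max_depth=3):
    """Depth-limited BFS sketch: stop extending paths longer than max_depth."""
    dep_graph = {root: set()}        # node -> direct dependencies discovered for it
    all_deps = set()
    queue = deque([[root]])
    while queue:
        path = queue.popleft()
        node = path[-1]
        deps = set(get_deps(node))
        dep_graph.setdefault(node, set()).update(deps)
        for new_node in deps:
            all_deps.add(new_node)
            new_path = list(path)
            new_path.append(new_node)
            # The guard from the diff: only keep walking while the path is short,
            # so deep dependency chains no longer blow up the queue.
            if len(new_path) <= max_depth:
                queue.append(new_path)
    return dep_graph, all_deps

deps = {"a": ["b"], "b": ["c"], "c": ["d"], "d": ["e"], "e": []}
graph, seen = build_graph_of_deps("a", lambda n: deps.get(n, ()))
# seen == {"b", "c", "d"}: "d" is recorded but never expanded, so "e" is never reached
```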

def build_conflict_map(self, specs, specs_to_add=None, history_specs=None):
@@ -447,15 +448,16 @@ def build_conflict_map(self, specs, specs_to_add=None, history_specs=None):
if len(matches) == 1:
specs = set(self.ms_depends(matches[0]))
specs.update({_.to_match_spec() for _ in self._system_precs})
print("Building up graph of deps")
for spec in specs:
self._get_package_pool((spec, ))

dep_graph = {}
dep_list = {}
print("Building up graph of deps")
for spec in specs:
print("Building up graph of deps for %s" % spec)

dep_graph_for_spec, all_deps_for_spec = self.build_graph_of_deps(spec)
print("Done building up graph of deps for %s" % spec)
dep_graph.update(dep_graph_for_spec)
if dep_list.get(spec.name):
dep_list[spec.name].append(spec)
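
This hunk walks every requested spec, folds each spec's dependency graph into one combined `dep_graph`, and buckets specs by package name in `dep_list`; the prints only trace per-spec progress. A rough sketch of that merge step, assuming stand-in spec objects with a `.name` attribute and a one-argument `build_graph_of_deps` callable like the method above:

```python
from collections import defaultdict

def merge_dep_graphs(specs, build_graph_of_deps):
    """Fold per-spec dependency graphs into one dict and group specs by name."""
    dep_graph = {}
    dep_list = defaultdict(list)
    for spec in specs:
        graph_for_spec, _all_deps = build_graph_of_deps(spec)  # second value unused in this sketch
        dep_graph.update(graph_for_spec)   # merge this spec's sub-graph into the shared view
        dep_list[spec.name].append(spec)   # names collected here feed the conflict grouping below
    return dep_graph, dep_list
```

The `defaultdict` stands in for the explicit `if dep_list.get(spec.name)` check in the diff; the resulting mapping is the same.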
@@ -471,12 +473,13 @@ def build_conflict_map(self, specs, specs_to_add=None, history_specs=None):
chains = []
conflicting_pkgs_pkgs = {}
for k, v in dep_list.items():
set_v = frozenset(v)
# Packages probably conflict
if len(v) > 1:
if conflicting_pkgs_pkgs.get(frozenset(v)) is None:
conflicting_pkgs_pkgs[frozenset(v)] = [k]
if len(set_v) > 1:
if conflicting_pkgs_pkgs.get(set_v) is None:
conflicting_pkgs_pkgs[set_v] = [k]
else:
conflicting_pkgs_pkgs[frozenset(v)].append(k)
conflicting_pkgs_pkgs[set_v].append(k)

for roots, nodes in conflicting_pkgs_pkgs.items():
lroots = [_ for _ in roots]
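
The speed-up in this last hunk is mainly about hashing: `frozenset(v)` was being rebuilt up to three times per iteration and is now computed once as `set_v` and reused as the dictionary key. The length test also moves from the raw list to the frozenset, so duplicate entries for the same spec no longer count as a conflict on their own. A compact sketch of the grouping, assuming `dep_list` maps a package name to the list of specs that mentioned it:

```python
def group_probable_conflicts(dep_list):
    """Group package names by the deduplicated set of specs that pulled them in."""
    conflicting_pkgs_pkgs = {}
    for k, v in dep_list.items():
        set_v = frozenset(v)        # hash the spec list once and reuse it as the key
        if len(set_v) > 1:          # more than one distinct spec -> probable conflict
            conflicting_pkgs_pkgs.setdefault(set_v, []).append(k)
    return conflicting_pkgs_pkgs
```

`setdefault` collapses the diff's `get(...) is None` branch; the result is the same mapping from a frozenset of specs to the package names they collide on.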
