Skip to content

Commit

Permalink
interim
Browse files Browse the repository at this point in the history
  • Loading branch information
naylor-b committed Dec 15, 2014
1 parent fe1044f commit 5cf22a1
Show file tree
Hide file tree
Showing 7 changed files with 81 additions and 19 deletions.
2 changes: 1 addition & 1 deletion contrib/testmpi/test_mpi.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ def test_fan_out_in(self):

top.run()

top._system.dump()
#top._system.dump()

if self.comm.rank == 0:
self.assertTrue(all(top.C4.a==np.ones(size, float)*11.))
Expand Down
6 changes: 5 additions & 1 deletion contrib/testmpi/test_mpi_derivatives.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
import numpy as np

from openmdao.util.testutil import assert_rel_error
from openmdao.test.mpiunittest import MPITestCase, collective_assert_rel_error
from openmdao.test.mpiunittest import MPITestCase, collective_assert_rel_error, \
MPIContext
from openmdao.main.api import Assembly, Component, set_as_top
from openmdao.main.datatypes.api import Float
from openmdao.main.mpiwrap import mpiprint
Expand Down Expand Up @@ -94,7 +95,10 @@ def test_calc_gradient_adjoint(self):
J = self.top.driver.workflow._system.get_combined_J(J)
#mpiprint("final J: %s" % J)

#with MPIContext():
self.top._system.dump()

print "FOO"

collective_assert_rel_error(self,
J['_pseudo_0.out0']['comp.x'][0][0],
Expand Down
21 changes: 14 additions & 7 deletions openmdao.main/src/openmdao/main/array_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,26 +102,33 @@ def idx_size(idxs, size=None):
str(type(idxs)))

def to_slice(idxs):
"""Convert an index array to a slice if possible. Otherwise,
return the index array.
"""Convert an index array or list to a slice if possible. Otherwise,
return the index array or list.
"""
if isinstance(idxs, slice):
return idxs
elif isinstance(idxs, ndarray):
elif isinstance(idxs, ndarray) or isinstance(idxs, list):
if len(idxs) == 1:
return slice(idxs[0], idxs[0]+1)
elif len(idxs) == 0:
return slice(0,0)

imin = idxs.min()
imax = idxs.max()
if isinstance(idxs, ndarray):
imin = idxs.min()
imax = idxs.max()
else:
imin = min(idxs)
imax = max(idxs)

stride = idxs[1]-idxs[0]

if stride == 0:
return idxs

for i in xrange(len(idxs)):
if i and idxs[i] - idxs[i-1] != stride:
return idxs


if stride < 0:
## negative strides cause some failures, so just do positive for now
#return slice(imax+1, imin, stride)
Expand All @@ -134,7 +141,7 @@ def to_slice(idxs):
raise RuntimeError("can't convert indices of type '%s' to a slice" %
str(type(idxs)))

def to_indices(idxs, val):
def to_indices(idxs, val=None):
"""Convert an slice or simple index into an index array.
index arrays are just returned unchanged.
"""
Expand Down
6 changes: 3 additions & 3 deletions openmdao.main/src/openmdao/main/linearsolver.py
Original file line number Diff line number Diff line change
Expand Up @@ -318,9 +318,9 @@ def mult(self, mat, sol_vec, rhs_vec):
system.applyJ(vnames)

rhs_vec.array[:] = system.rhs_vec.array[:]
mpiprint('names = %s' % system.sol_vec.keys())
mpiprint('arg = %s, result=%s' % (sol_vec.array, rhs_vec.array))
mpiprint('df, du, dp', system.vec['df'].array, system.vec['du'].array, system.vec['dp'].array)
#mpiprint('names = %s' % system.sol_vec.keys())
#mpiprint('arg = %s, result=%s' % (sol_vec.array, rhs_vec.array))
#mpiprint('df, du, dp', system.vec['df'].array, system.vec['du'].array, system.vec['dp'].array)

def apply(self, mat, sol_vec, rhs_vec):
""" Applies preconditioner """
Expand Down
5 changes: 4 additions & 1 deletion openmdao.main/src/openmdao/main/systems.py
Original file line number Diff line number Diff line change
Expand Up @@ -638,8 +638,11 @@ def dump(self, nest=0, stream=sys.stdout, verbose=False):
elif isinstance(self, AssemblySystem):
self._comp._system.dump(nest, stream)
else:
partial_subs = [s for s in self.local_subsystems() if s.scatter_partial]
for sub in self.local_subsystems():
sub.dump(nest, stream)
if sub in partial_subs:
sub.scatter_partial.dump(self, self.vec['u'], self.vec['p'], nest+4, stream)

return stream.getvalue() if getval else None

Expand Down Expand Up @@ -1360,7 +1363,7 @@ def setup_scatters(self):
continue
noflat_conns.add(node)
else:
print node, src_idxs
#print node, src_idxs
src_partial.append(src_idxs)
dest_partial.append(dest_idxs)

Expand Down
36 changes: 30 additions & 6 deletions openmdao.main/src/openmdao/main/vecwrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
get_flat_index_start, get_val_and_index, get_shape, \
get_flattened_index, to_slice, to_indices
from openmdao.main.interfaces import IImplicitComponent
from openmdao.main.array_helpers import to_indices
from openmdao.util.typegroups import int_types
from openmdao.util.graph import base_var

Expand Down Expand Up @@ -425,6 +426,9 @@ def __init__(self, system, var_idxs, input_idxs,
#print "var_idxs", var_idxs
#print "input_idxs", input_idxs
var_idxs, input_idxs = merge_idxs(var_idxs, input_idxs)

self.var_idxs = to_slice(var_idxs)
self.input_idxs = to_slice(input_idxs)

if len(var_idxs) != len(input_idxs):
raise RuntimeError("ERROR: creating scatter (index size mismatch): (%d != %d) srcs: %s, dest: %s in %s" %
Expand All @@ -437,10 +441,10 @@ def __init__(self, system, var_idxs, input_idxs,
input_idx_set = PETSc.IS().createGeneral(input_idxs,
comm=system.mpi.comm)

print 'before', var_idx_set.indices
#print 'before', var_idx_set.indices
if system.app_ordering is not None:
var_idx_set = system.app_ordering.app2petsc(var_idx_set)
print 'after', var_idx_set.indices
#print 'after', var_idx_set.indices

try:
# note that scatter created here can be reused for other vectors as long
Expand Down Expand Up @@ -491,12 +495,32 @@ def __call__(self, system, srcvec, destvec, complex_step=False):
for dest in dests:
if src != dest:
try:
system.scope.set(dest, system.scope.get_attr_w_copy(src))
system.scope.set(dest,
system.scope.get_attr_w_copy(src))
except Exception:
system.scope.reraise_exception("cannot set '%s' from '%s'" % (dest, src))
system.scope.reraise_exception("cannot set '%s' from '%s'" %
(dest, src))

def dump(self):
pass
def dump(self, system, srcvec, destvec, nest=0, stream=sys.stdout):
    """Write a human-readable description of this scatter to *stream*.

    system: the owning System; used only to reach ``system.app_ordering``
        for the application-to-PETSc index translation.
    srcvec: source VecWrapper; only its ``.array`` is consulted, to expand
        a slice form of the source indices back into explicit indices.
    destvec: destination VecWrapper (currently unused here; kept for
        signature symmetry with the scatter call).
    nest: number of spaces of leading indentation for each output line.
    stream: output stream (defaults to sys.stdout).
    """
    stream.write(" "*nest)
    stream.write("Scatters: ")
    # Nothing to show: no flattened connections participate in this scatter.
    if not self.scatter_conns:
        stream.write("(empty)\n")
        return
    stream.write("\n")
    stream.write(" "*nest)
    stream.write("scatter vars: %s\n" % sorted(self.scatter_conns)
    )
    stream.write(" "*nest)
    # var_idxs/input_idxs may be slices or index arrays (see to_slice in
    # array_helpers), so this prints whichever compact form was stored.
    stream.write("%s --> %s\n" % (self.var_idxs, self.input_idxs))
    if MPI and system.app_ordering:
        # Under MPI, source indices are in "application" ordering; translate
        # them to PETSc ordering so the dump matches what PETSc actually
        # scatters.  NOTE(review): app2petsc is given a plain index array
        # here, whereas the scatter setup builds a PETSc.IS first — confirm
        # app2petsc accepts raw indices.
        var_idxs = to_indices(self.var_idxs, srcvec.array)
        var_idx_set = system.app_ordering.app2petsc(var_idxs)
        stream.write(" "*nest)
        stream.write("(petsc): %s --> %s\n" % (var_idx_set, self.input_idxs))
    # Non-flattenable (object) connections are transferred by name, not by
    # index, so they are listed separately.
    if self.noflat_vars:
        stream.write(" "*nest)
        stream.write("no-flats: %s\n" % self.noflat_vars)


class SerialScatter(object):
Expand Down
24 changes: 24 additions & 0 deletions openmdao.test/src/openmdao/test/mpiunittest.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,30 @@
MPI = None


class MPIContext(object):
    """Supports using the 'with' statement when executing code in
    multiple MPI processes so that if any of the blocks raise an
    exception, all processes sharing that communicator will fail.

    On exit, every rank reports whether its block raised, the reports
    are exchanged with ``allgather``, and a rank that succeeded locally
    raises RuntimeError only if some *other* rank failed.
    """

    def __init__(self):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_val, exc_tb):
        # True if this rank's block raised an exception.
        fail = exc_val is not None

        # Collective exchange: every rank learns every rank's status.
        # (All ranks must reach this point or the allgather would hang.)
        fails = MPI.COMM_WORLD.allgather(fail)

        if fail:
            # Returning None (falsy) lets Python re-raise the local
            # exception for us.
            return None
        elif any(fails):
            # This rank succeeded, but another rank failed: fail here too
            # so the whole test fails consistently across the communicator.
            raise RuntimeError("a test failed in another rank")
        # No rank failed anywhere: exit cleanly.  (The original code raised
        # RuntimeError on every successful rank even when all ranks passed.)
        return None

def mpi_fail_if_any(f):
"""In order to keep MPI tests from hanging when
a test fails in one process and succeeds in
Expand Down

0 comments on commit 5cf22a1

Please sign in to comment.