improve error messages when a method or attribute is missing (#27110)
Summary:
Pull Request resolved: pytorch/pytorch#27110

Previously, error messages for missing methods on some types, such as tensors,
talked about 'builtins', which exist only inside the compiler. Furthermore,
the error was only raised when the builtin was applied and it was discovered
that no builtin existed. This changes the error message so that a missing
method on one of our builtin types is reported at attribute lookup.
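
As an illustration, roughly the user-visible difference (a hedged sketch, not
part of the commit; the rendered type string 'Tensor' is an assumption based
on the new ErrorReport in sugared_value.cpp):

    import torch

    def unknown_builtin(x):
        # `splork` is not a method on Tensor; scripting now fails at
        # attribute lookup instead of when the "builtin" is applied.
        return x.splork(3)

    try:
        torch.jit.script(unknown_builtin)
    except RuntimeError as e:
        # Old error mentioned "Unknown builtin op".
        # New error reads something like:
        #   Tried to access nonexistent attribute or method 'splork'
        #   of type 'Tensor'.
        print(e)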

Test Plan: Imported from OSS

Differential Revision: D17677616

Pulled By: zdevito

fbshipit-source-id: 2f7cf6c6093a9c832569c44f4b1044a2e56fe205
zdevito authored and facebook-github-bot committed Oct 4, 2019
1 parent ef97841 commit 9ade1e6
Showing 5 changed files with 81 additions and 37 deletions.
10 changes: 5 additions & 5 deletions test/test_jit.py
@@ -4598,7 +4598,7 @@ def test_method_on_number(self):
         def func():
             c = 1
             return c.add(1)
-        with self.assertRaisesRegex(RuntimeError, 'Cannot call methods on numbers'):
+        with self.assertRaisesRegex(RuntimeError, 'nonexistent attribute or method'):
             torch.jit.script(func)
 
     # testing implicit conversion of tensors to scalars to match function arguments
@@ -10668,7 +10668,7 @@ def forward(self, x):
         ReassignSelfRHS()
 
     def test_unknown_builtin(self):
-        with self.assertRaisesRegex(RuntimeError, 'Unknown builtin op'):
+        with self.assertRaisesRegex(RuntimeError, 'nonexistent attribute or method'):
             @torch.jit.script
             def unknown_builtin(x):
                 return x.splork(3)
@@ -11966,12 +11966,12 @@ def f(x):
 
         self.checkScript(f, (torch.rand(20, 20, 20),), optimize=True)
 
-        with self.assertRaisesRegex(RuntimeError, "Unknown attribute to named tuple"):
+        with self.assertRaisesRegex(RuntimeError, "nonexistent attribute"):
             @torch.jit.script
             def g1(x):
                 return x.max(dim=1).unknown_symbol
 
-        with self.assertRaisesRegex(RuntimeError, "Getting attributes of tuples is not supported"):
+        with self.assertRaisesRegex(RuntimeError, "nonexistent attribute"):
             @torch.jit.script
             def g2(x):
                 print((x, x, x).__doc__)
@@ -19910,7 +19910,7 @@ def call(): # noqa: E306
         for func in ops:
             self.checkScript(func, ())
 
-        with self.assertRaisesRegex(RuntimeError, "nonexistent attribute __add__. Did you forget to initialize it"):
+        with self.assertRaisesRegex(RuntimeError, "nonexistent attribute"):
            @torch.jit.script
            def test():
                return Foo(torch.tensor(1)) + Foo(torch.tensor(1))
8 changes: 8 additions & 0 deletions torch/csrc/jit/ir.cpp
@@ -1711,6 +1711,14 @@ Node* ProfileOp::allocNewInstance(Graph* g) {
   return new ProfileOp(g, {nullptr});
 }
 
+TypePtr NamedValue::type() const {
+  if (value_) {
+    return value_->type();
+  } else {
+    return ivalue_.type();
+  }
+}
+
 constexpr Symbol ProfileOp::Kind;
 } // namespace jit
 } // namespace torch
2 changes: 2 additions & 0 deletions torch/csrc/jit/named_value.h
@@ -70,6 +70,8 @@ struct NamedValue {
     return *loc_;
   }
 
+  at::TypePtr type() const;
+
  private:
   c10::optional<SourceRange> loc_;
   c10::optional<std::string> name_;
91 changes: 59 additions & 32 deletions torch/csrc/jit/script/sugared_value.cpp
@@ -87,51 +87,53 @@ std::shared_ptr<SugaredValue> SimpleValue::attr(
       return std::make_shared<SimpleValue>(r);
     }
   }
-  if (value_->type()->isSubtypeOf(NumberType::get())) {
-    throw ErrorReport(loc) << "Cannot call methods on numbers";
-  }
 
   // accessing fields of named tuples
   if (auto tuple_type = value_->type()->cast<TupleType>()) {
-    if (!tuple_type->schema()) {
-      throw ErrorReport(loc) << "Getting attributes of tuples is not supported";
-    }
-    auto attrs = tuple_type->schema()->arguments();
-    for (size_t i = 0; i < attrs.size(); i++) {
-      if (attrs[i].name() == field) {
-        auto idx = m.graph()->insertConstant(IValue(static_cast<int64_t>(i)));
-        auto out_type = tuple_type->elements().at(i);
-        auto r =
-            m.graph()
-                ->insertNode(m.graph()->createTupleIndex(value_, idx, out_type))
-                ->output();
-        return std::make_shared<SimpleValue>(r);
+    if (tuple_type->schema()) {
+      auto attrs = tuple_type->schema()->arguments();
+      for (size_t i = 0; i < attrs.size(); i++) {
+        if (attrs[i].name() == field) {
+          auto idx = m.graph()->insertConstant(IValue(static_cast<int64_t>(i)));
+          auto out_type = tuple_type->elements().at(i);
+          auto r = m.graph()
+                       ->insertNode(
+                           m.graph()->createTupleIndex(value_, idx, out_type))
+                       ->output();
+          return std::make_shared<SimpleValue>(r);
+        }
       }
     }
-    throw ErrorReport(loc) << "Unknown attribute to named tuple";
-  }
-
-  if (auto classType = value_->type()->cast<ClassType>()) {
+  } else if (auto classType = value_->type()->cast<ClassType>()) {
     // This is a class, emit the proper attribute lookup
     if (auto method = classType->getMethod(field)) {
      return std::make_shared<MethodValue>(getValue(), field);
     }
-    if (!classType->hasAttribute(field)) {
-      throw ErrorReport(loc)
-          << "Tried to access nonexistent attribute " << field
-          << ". Did you forget to initialize it in __init__()?";
+    if (classType->hasAttribute(field)) {
+      auto& g = *m.graph();
+      auto n = g.insertNode(g.createGetAttr(value_, field));
+      return std::make_shared<SimpleValue>(n->output());
     }
-    auto& g = *m.graph();
-    auto n = g.insertNode(g.createGetAttr(value_, field));
-    return std::make_shared<SimpleValue>(n->output());
-  }
-
-  if (auto iface = value_->type()->cast<InterfaceType>()) {
+  } else if (auto iface = value_->type()->cast<InterfaceType>()) {
     // accessing methods of interfaces
     if (auto schema = iface->getMethod(field)) {
       return std::make_shared<MethodValue>(getValue(), field);
     }
   }
 
-  return std::make_shared<BuiltinFunction>(
-      Symbol::aten(field), NamedValue(loc, "self", value_));
+  // none of the more-specific cases worked, so see if this is a builtin method
+  if (auto builtin = BuiltinFunction::tryCreate(
+          Symbol::aten(field), NamedValue(loc, "self", value_))) {
+    return builtin;
+  }
+
+  ErrorReport report(loc);
+  report << "Tried to access nonexistent attribute or method '" << field
+         << "' of type '" << value_->type()->python_str() << "'.";
+  if (value_->type()->kind() == ClassType::Kind) {
+    report << " Did you forget to initialize an attribute in __init__()?";
+  }
+  throw report;
 }
 
 std::vector<std::shared_ptr<SugaredValue>> SimpleValue::asTuple(
@@ -501,6 +503,31 @@ std::shared_ptr<SugaredValue> NamedTupleConstructor::call(
   return std::make_shared<SimpleValue>(self);
 }
 
+std::shared_ptr<BuiltinFunction> BuiltinFunction::tryCreate(
+    Symbol symbol,
+    c10::optional<NamedValue> self) {
+  for (const std::shared_ptr<Operator>& op : getAllOperatorsFor(symbol)) {
+    if (!self) {
+      return std::make_shared<BuiltinFunction>(symbol, nullptr);
+    }
+    if (auto index = op->schema().argumentIndexWithName("self")) {
+      std::unordered_map<std::string, TypePtr> type_env;
+      TypePtr formal_type = op->schema().arguments().at(*index).type();
+      const MatchTypeReturn matched =
+          matchTypeVariables(formal_type, self->type(), type_env);
+      if (!matched.success()) {
+        continue;
+      }
+      const auto concrete_type = tryEvalTypeVariables(formal_type, type_env);
+      if (!concrete_type || !self->type()->isSubtypeOf(concrete_type)) {
+        continue;
+      }
+      return std::make_shared<BuiltinFunction>(symbol, self);
+    }
+  }
+  return nullptr;
+}
+
 } // namespace script
 } // namespace jit
 } // namespace torch
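
For the new ClassType hint at the bottom of SimpleValue::attr above, a hedged
Python sketch of what a user would see (the class, the names, and the rendered
type string '__torch__.Counter' are illustrative assumptions, not from the
commit):

    import torch

    @torch.jit.script
    class Counter(object):
        def __init__(self):
            self.count = 0

    try:
        @torch.jit.script
        def bump(c: Counter):
            # 'total' is never assigned in __init__, so attribute lookup
            # fails and the ClassType branch appends the __init__ hint.
            return c.total
    except RuntimeError as e:
        # Expected, roughly: Tried to access nonexistent attribute or method
        # 'total' of type '__torch__.Counter'. Did you forget to initialize
        # an attribute in __init__()?
        print(e)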
7 changes: 7 additions & 0 deletions torch/csrc/jit/script/sugared_value.h
@@ -168,6 +168,13 @@ struct TORCH_API BuiltinFunction : public SugaredValue {
       at::ArrayRef<NamedValue> attributes,
       at::ArrayRef<NamedValue> inputs,
       size_t n_binders) override;
+
+  // try to create this builtin but if it doesn't exist or the self argument
+  // cannot possibly match, then return nullptr. Use in situations where it is
+  // not clear if it is a valid builtin
+  static std::shared_ptr<BuiltinFunction> tryCreate(
+      Symbol symbol,
+      c10::optional<NamedValue> self);
 };
 
 struct TORCH_API BuiltinModule : public SugaredValue {
