[pytorch][codegen] add fully migrated scripts to mypy strict config (pytorch#47747)

Summary:
Pull Request resolved: pytorch#47747

Moved MANUAL_BACKEND, MANUAL_AUTOGRAD_AND_TRACER, and MANUAL_AUTOGRAD /
MANUAL_TRACER into gen_trace_type.py to keep mypy from scanning the
not-yet-migrated gen_variable_type.py.
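
The underlying issue is that mypy follows imports by default: as long as gen_trace_type.py imported MANUAL_TRACER from gen_variable_type.py, listing it in the strict config would pull the unmigrated module into strict checking as well. This commit reverses the dependency. A minimal sketch of the import structure before and after (module and constant names are taken from this commit; everything else is elided):

    # Before: tools/autograd/gen_trace_type.py depended on the unmigrated
    # module, so mypy followed the import into gen_variable_type.py.
    from .gen_variable_type import MANUAL_TRACER

    # After: the sets are defined in gen_trace_type.py itself, and the
    # unmigrated module imports from the migrated one, so strict checking
    # of gen_trace_type.py no longer touches gen_variable_type.py.
    from .gen_trace_type import MANUAL_BACKEND, MANUAL_AUTOGRAD_AND_TRACER, MANUAL_AUTOGRAD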

Differential Revision: D24885066

Test Plan: Imported from OSS

Reviewed By: ezyang

Pulled By: ljk53

fbshipit-source-id: bf420e21c26f45fe2b94977bc6df840ffd8a3128
ljk53 authored and facebook-github-bot committed Nov 14, 2020
1 parent 4ff8cd8 commit c936b43
Showing 3 changed files with 31 additions and 28 deletions.
mypy-strict.ini (3 additions, 0 deletions)
@@ -30,6 +30,9 @@ implicit_reexport = False
 strict_equality = True
 
 files = tools/codegen/gen.py,
+    tools/autograd/gen_annotated_fn_args.py,
+    tools/autograd/gen_python_functions.py,
+    tools/autograd/gen_trace_type.py,
     torch/utils/benchmark/utils/common.py,
     torch/utils/benchmark/utils/timer.py,
     torch/utils/benchmark/utils/valgrind_wrapper/*.py,
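
For context, every script on this files list is type-checked with the strict settings in this config, which is why only fully migrated scripts can be added. A minimal, hypothetical example (not from the PyTorch repo) of what migration means in practice, assuming disallow_untyped_defs is among the enabled strict flags:

    # Rejected under strict mode: the function has no type annotations.
    def scale(x, factor):
        return x * factor

    # Accepted: fully annotated, as required of every file listed in
    # mypy-strict.ini.
    def scale_typed(x: float, factor: float) -> float:
        return x * factor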
tools/autograd/gen_trace_type.py (27 additions, 2 deletions)
@@ -1,12 +1,37 @@
-from typing import Optional, List, Sequence
+import itertools
+from typing import Optional, List, Sequence, Union
 
 from tools.codegen.api.types import *
 import tools.codegen.api.cpp as cpp
 from tools.codegen.code_template import CodeTemplate
 from tools.codegen.gen import with_native_function, parse_native_yaml, FileManager, mapMaybe
 from tools.codegen.model import *
 
-from .gen_variable_type import MANUAL_TRACER
+# Note [Manual Backend kernels]
+# For these ops, we want to manually register to dispatch key Backend and
+# skip codegen-ed registration to all keys before Backend.
+# For codegen this means:
+#   - op set below must match ops with manual_kernel_registration=True in native_functions.yaml
+#     where we skip codegen backend kernels
+#   - all ops below are part of MANUAL_AUTOGRAD to skip codegen Autograd kernel registration
+#   - all ops below are part of MANUAL_TRACER to skip codegen Tracer kernel registration
+# Note: we still register to dispatch key Profiler for these ops, keeping it untouched for now.
+# You can find the manual registration in torch/csrc/autograd/VariableTypeManual.cpp
+MANUAL_BACKEND = set([
+    'options', 'data', 'set_data', 'is_leaf', 'output_nr', '_version', 'retain_grad',
+    '_backward', 'requires_grad_',
+])
+
+# For these ops we want to skip the codegen-ed registration to both Autograd and Tracer keys.
+# You can find the manual registration in torch/csrc/autograd/VariableTypeManual.cpp
+MANUAL_AUTOGRAD_AND_TRACER = set([
+    'resize_', 'resize_as_', 'detach', 'detach_', 'copy_',
+])
+
+# Currently MANUAL_AUTOGRAD and MANUAL_TRACER share the same set of ops:
+# union(MANUAL_BACKEND, MANUAL_AUTOGRAD_AND_TRACER)
+# You can find the manual registration in torch/csrc/autograd/VariableTypeManual.cpp
+MANUAL_AUTOGRAD = MANUAL_TRACER = MANUAL_BACKEND | MANUAL_AUTOGRAD_AND_TRACER
 
 # These functions we don't want to record for tracing, because we always want
 # to trace their constituent parts. This is a temporary hack in lieu
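
To make the role of these sets concrete, here is a hypothetical sketch (the helper name and logic are illustrative, not part of this commit) of how a codegen pass can consult them when deciding whether to emit a registration:

    # Hypothetical helper: ops in MANUAL_TRACER are registered by hand in
    # torch/csrc/autograd/VariableTypeManual.cpp, so codegen should not
    # emit a Tracer kernel registration for them.
    def should_emit_tracer_registration(op_name: str) -> bool:
        return op_name not in MANUAL_TRACER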
tools/autograd/gen_variable_type.py (1 addition, 26 deletions)
@@ -27,32 +27,7 @@
 from .gen_autograd import VIEW_FUNCTIONS, VIEW_FUNCTIONS_WITH_METADATA_CHANGE, \
     MULTI_OUTPUT_SAFE_FUNCTIONS, RETURNS_VIEWS_OF_INPUT
 from .gen_autograd_functions import uses_single_grad
-
-# Note [Manual Backend kernels]
-# For these ops, we want to manually register to dispatch key Backend and
-# skip codegen-ed registration to all keys before Backend.
-# For codegen this means:
-#   - op set below must match ops with manual_kernel_registration=True in native_functions.yaml
-#     where we skip codegen backend kernels
-#   - all ops below are part of MANUAL_AUTOGRAD to skip codegen Autograd kernel registration
-#   - all ops below are part of MANUAL_TRACER to skip codegen Tracer kernel registration
-# Note: we still register to dispatch key Profiler for these ops, keeping it untouched for now.
-# You can find the manual registration in torch/csrc/autograd/VariableTypeManual.cpp
-MANUAL_BACKEND = set([
-    'options', 'data', 'set_data', 'is_leaf', 'output_nr', '_version', 'retain_grad',
-    '_backward', 'requires_grad_',
-])
-
-# For these ops we want to skip the codegen-ed registration to both Autograd and Tracer keys.
-# You can find the manual registration in torch/csrc/autograd/VariableTypeManual.cpp
-MANUAL_AUTOGRAD_AND_TRACER = set([
-    'resize_', 'resize_as_', 'detach', 'detach_', 'copy_',
-])
-
-# Currently MANUAL_AUTOGRAD and MANUAL_TRACER share the same set of ops:
-# union(MANUAL_BACKEND, MANUAL_AUTOGRAD_AND_TRACER)
-# You can find the manual registration in torch/csrc/autograd/VariableTypeManual.cpp
-MANUAL_AUTOGRAD = MANUAL_TRACER = MANUAL_BACKEND | MANUAL_AUTOGRAD_AND_TRACER
+from .gen_trace_type import MANUAL_BACKEND, MANUAL_AUTOGRAD_AND_TRACER, MANUAL_AUTOGRAD
 
 # We don't set or modify grad_fn on these methods. Generally, they return
 # tensors that have requires_grad=False. In-place functions listed here will
