Revert "Slightly improve AOTAutograd logging with ViewAndMutationMeta (pytorch#105702)"

This reverts commit cc13734.

Reverted pytorch#105702 on behalf of https://github.com/PaliC due to breaking internal export tests (relevant details shared with author; see the comment on pytorch#105702).
pytorchmergebot committed Jul 25, 2023
1 parent 3eef86d commit 48cd8e2
Showing 2 changed files with 26 additions and 35 deletions.
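For context, the change being reverted had given ViewAndMutationMeta a readable __str__ built on torchgen.utils.dataclass_repr and logged the collected metadata per AOT id; the removed lines in the diffs below show exactly that. A minimal, self-contained sketch of the logging pattern (MetaLike, the pprint-based repr, and the logger setup are illustrative stand-ins, not PyTorch's actual implementation):

import dataclasses
import logging
import pprint
from dataclasses import dataclass, field
from typing import List

log = logging.getLogger(__name__)


@dataclass
class MetaLike:
    # Illustrative stand-in for a few ViewAndMutationMeta-style fields.
    input_info: List[str] = field(default_factory=list)
    keep_input_mutations: bool = False

    def __str__(self) -> str:
        # The reverted PR used torchgen.utils.dataclass_repr for a multi-line
        # repr; pprint over asdict() is a rough stand-in here.
        return pprint.pformat(dataclasses.asdict(self))


# After collecting metadata, the reverted PR logged it keyed by the AOT id:
metadata = MetaLike(input_info=["mutates_data"], keep_input_mutations=False)
aot_id = -1  # placeholder mirroring aot_config.aot_id in the diff
log.debug("ViewAndMutationMeta for %s:\n%s", aot_id, metadata)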
torch/_export/trace.py (50 changes: 25 additions & 25 deletions)
@@ -65,35 +65,10 @@ def functional_call(*args):

     out_spec = None

-    graph_module = None
-    num_fwd_returns = None
-
-    def fw_compiler(gm, inputs):
-        nonlocal graph_module
-        graph_module = gm
-
-    def partition_fn(joint_module, joint_inputs, *, num_fwd_outputs, **kwargs):
-        nonlocal num_fwd_returns
-        num_fwd_returns = num_fwd_outputs
-        return default_partition(
-            joint_module, joint_inputs, num_fwd_outputs=num_fwd_outputs, **kwargs
-        )
-
-    aot_config = AOTConfig(
-        fw_compiler=fw_compiler,
-        bw_compiler=lambda gm, inputs: None,
-        partition_fn=partition_fn,
-        decompositions=CORE_ATEN_DECOMPOSITIONS_TABLE,  # type: ignore[arg-type]
-        num_params_buffers=params_len,
-        aot_id=-1,
-        keep_inference_input_mutations=False,
-    )
-
     with enable_python_dispatcher():
         fw_metadata = run_functionalized_fw_and_collect_metadata(
             lambda *args: pytree.tree_flatten(functional_call(*args))[0],
             keep_input_mutations=False,
-            aot_config=aot_config,
         )(*copy.deepcopy(full_args))  # type: ignore[operator]

     assert len(fw_metadata.input_info) == len(full_args)
@@ -103,6 +78,21 @@ def partition_fn(joint_module, joint_inputs, *, num_fwd_outputs, **kwargs):
         if input_info.mutates_data or input_info.mutates_metadata
     ]

+    graph_module = None
+
+    def fw_compiler(gm, inputs):
+        nonlocal graph_module
+        graph_module = gm
+
+    num_fwd_returns = None
+
+    def partition_fn(joint_module, joint_inputs, *, num_fwd_outputs, **kwargs):
+        nonlocal num_fwd_returns
+        num_fwd_returns = num_fwd_outputs
+        return default_partition(
+            joint_module, joint_inputs, num_fwd_outputs=num_fwd_outputs, **kwargs
+        )
+
     def set_state_proxies(state_args):
         modes = get_torch_dispatch_modes()
         proxy_tensor_modes = [m for m in modes if isinstance(m, ProxyTorchDispatchMode)]
@@ -115,6 +105,16 @@ def set_state_proxies(state_args):
             )
             set_proxy_slot(arg, tracer, params_flat[i])

+    aot_config = AOTConfig(
+        fw_compiler=fw_compiler,
+        bw_compiler=lambda gm, inputs: None,
+        partition_fn=partition_fn,
+        decompositions=CORE_ATEN_DECOMPOSITIONS_TABLE,  # type: ignore[arg-type]
+        num_params_buffers=params_len,
+        aot_id=-1,
+        keep_inference_input_mutations=False,
+    )
+
     def exported_call(*args):
         state_args = args[:params_len]
         unwrapped_state_args = _unwrap_all_tensors_from_functional(
torch/_functorch/aot_autograd.py (11 changes: 1 addition & 10 deletions)
@@ -36,7 +36,6 @@
 from . import config
 from .partitioners import default_partition
 from torch._guards import TracingContext, DuplicateInputs, Source
-from torchgen.utils import dataclass_repr

 log = logging.getLogger(__name__)
 aot_joint_log = getArtifactLogger(__name__, "aot_joint_graph")
@@ -579,9 +578,6 @@ def symints_saved_for_backwards_slice(self):
         else:
             return slice(0, 0)  # empty slice

-    def __str__(self):
-        return dataclass_repr(self)
-
     def __eq__(self, other):
         if not isinstance(other, ViewAndMutationMeta):
             return NotImplemented
@@ -715,8 +711,7 @@ def _get_hints(exprs):
 def run_functionalized_fw_and_collect_metadata(
     f,
     *,
-    keep_input_mutations: bool,
-    aot_config: 'AOTConfig',
+    keep_input_mutations: bool
 ) -> ViewAndMutationMeta:
     memo = {}

@@ -950,7 +945,6 @@ def inner(*flat_args):
             keep_input_mutations=keep_input_mutations,
             traced_tangents=traced_tangents,
         )
-        log.debug("ViewAndMutationMeta for %s:\n%s", aot_config.aot_id, metadata)
         return metadata

     return inner
@@ -2204,7 +2198,6 @@ def wrapped_flat_fn(*args):
         ref_fw_metadata = run_functionalized_fw_and_collect_metadata(
             wrapped_flat_fn,
             keep_input_mutations=fw_metadata.keep_input_mutations,
-            aot_config=aot_config,
         )(*deduped_flat_args)
         assert ref_fw_metadata == updated_fw_metadata, \
             f'ref_metadata={str(ref_fw_metadata)}, actual_metadata={str(updated_fw_metadata)}'
@@ -2345,7 +2338,6 @@ def wrapped_flat_fn(*args):
         ref_fw_metadata = run_functionalized_fw_and_collect_metadata(
             wrapped_flat_fn,
             keep_input_mutations=fw_metadata.keep_input_mutations,
-            aot_config=aot_config,
         )(*flat_args_with_synthetic_bases)
         assert ref_fw_metadata == fw_metadata_updated, (
             f'ref_metadata={pprint.pformat(partial_asdict(ref_fw_metadata))}, '
@@ -3270,7 +3262,6 @@ def convert(idx, x):
         fw_metadata = run_functionalized_fw_and_collect_metadata(
             flat_fn,
             keep_input_mutations=aot_config.keep_inference_input_mutations and not needs_autograd,
-            aot_config=aot_config,
         )(*fake_flat_args)

         if aot_config.is_export:
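With the aot_config parameter removed again, run_functionalized_fw_and_collect_metadata goes back to being configured with only the keyword-only keep_input_mutations flag and then called on the flattened inputs, as in the restored call sites above. A toy, self-contained sketch of that calling convention (collect_metadata, flat_fn, and the returned dict are illustrative stand-ins, not the real AOTAutograd API):

from typing import Any, Callable, Dict


def collect_metadata(f: Callable[..., Any], *, keep_input_mutations: bool):
    # Stand-in mirroring the restored shape: configure with a keyword-only
    # flag, then invoke the returned function on the actual flat arguments.
    def inner(*flat_args: Any) -> Dict[str, Any]:
        out = f(*flat_args)
        return {
            "keep_input_mutations": keep_input_mutations,
            "num_inputs": len(flat_args),
            "output": out,
        }

    return inner


def flat_fn(x: int, y: int) -> int:
    return x + y


# Mirrors the call shape in the diff: helper(fn, keep_input_mutations=...)(*args)
fw_metadata = collect_metadata(flat_fn, keep_input_mutations=False)(2, 3)
print(fw_metadata["num_inputs"], fw_metadata["output"])  # prints: 2 5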