Apply ufmt to torch internal (pytorch#81643)
This is a big bang PR; merge conflicts are probably expected and will be addressed at merge.
Pull Request resolved: pytorch#81643
Approved by: https://github.com/ezyang
huydhn authored and pytorchmergebot committed Jul 22, 2022
1 parent f595467 commit 12cb265
Showing 48 changed files with 8,343 additions and 4,469 deletions.
8 changes: 8 additions & 0 deletions .lintrunner.toml
@@ -689,6 +689,13 @@ code = 'UFMT'
 include_patterns = [
     'test/onnx/**/*.py',
     'tools/**/*.py',
+    'torch/_decomp/**/*.py',
+    'torch/_lazy/**/*.py',
+    'torch/_masked/**/*.py',
+    'torch/_prims/**/*.py',
+    'torch/_refs/**/*.py',
+    'torch/_subclasses/**/*.py',
+    'torch/_*.py',
     'torchgen/**/*.py',
 ]
 command = [
@@ -699,6 +706,7 @@
 ]
 exclude_patterns = [
     'tools/gen_vulkan_spv.py',
+    'torch/__init__.py', # Skip this file to format because it's part of the public API
 ]
 init_command = [
     'python3',
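The UFMT linter wraps ufmt (usort plus black), so the directories newly added to include_patterns can also be checked outside of lintrunner. Below is a minimal sketch, not part of this PR, assuming ufmt is installed (pip install ufmt) and the script is run from the repository root; the path list simply mirrors the patterns added above.

# Rough sketch (not from this PR): run the standalone ufmt CLI over the
# directories newly covered by the UFMT lintrunner config above.
# "check" reports files that would be reformatted; "format" rewrites them.
import subprocess
import sys

NEW_UFMT_PATHS = [
    "torch/_decomp",
    "torch/_lazy",
    "torch/_masked",
    "torch/_prims",
    "torch/_refs",
    "torch/_subclasses",
]


def run_ufmt(mode: str = "check") -> int:
    """Run `ufmt <mode>` over the new paths and return its exit code."""
    completed = subprocess.run(["ufmt", mode, *NEW_UFMT_PATHS])
    return completed.returncode


if __name__ == "__main__":
    sys.exit(run_ufmt("check"))

A non-zero exit code from the check mode indicates files that drift from the ufmt style, which is roughly what the UFMT entry in .lintrunner.toml enforces; the exact command and init_command details live in the rest of that file and are not shown in this hunk.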
3 changes: 2 additions & 1 deletion torch/_VF.py
@@ -10,10 +10,11 @@
 introducing torch._VF
 """
-import torch
 import sys
 import types
 
+import torch
+
 
 class VFModule(types.ModuleType):
     vf: types.ModuleType
2 changes: 2 additions & 0 deletions torch/__config__.py
@@ -8,13 +8,15 @@ def show():
     """
     return torch._C._show_config()
 
+
 # TODO: In principle, we could provide more structured version/config
 # information here. For now only CXX_FLAGS is exposed, as Timer
 # uses them.
 def _cxx_flags():
     """Returns the CXX_FLAGS used when building PyTorch."""
     return torch._C._cxx_flags()
 
+
 def parallel_info():
     r"""Returns detailed string with parallelization settings"""
     return torch._C._parallel_info()
2 changes: 2 additions & 0 deletions torch/__future__.py
@@ -11,9 +11,11 @@
 """
 _overwrite_module_params_on_conversion = False
 
+
 def set_overwrite_module_params_on_conversion(value):
     global _overwrite_module_params_on_conversion
     _overwrite_module_params_on_conversion = value
 
+
 def get_overwrite_module_params_on_conversion():
     return _overwrite_module_params_on_conversion