[BE]: Enable F821 and fix bugs (#116579)
Fixes #112371
I fixed as many of the bugs as I could; for a few I could not figure out the proper fix, so I left them suppressed with `noqa` comments.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/116579
Approved by: https://github.com/ezyang
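For the `_avg_pool2d`/`_avg_pool3d` helpers in the first hunk below, the undefined names F821 flags are `reduce` and `operator`. A minimal sketch of what the proper fix would likely look like, assuming the intent is the standard-library `functools.reduce` and `operator.mul` (an assumption; this PR keeps the `noqa` because the intended fix was unclear, and `pool_window_size` is an illustrative name, not a helper from test_mps.py):

```python
# Sketch only, not part of this PR: importing the standard-library helpers
# would resolve F821 here without a noqa, assuming these are the names the
# test meant to use.
from functools import reduce
import operator

def pool_window_size(kernel_size):
    # Product of the kernel dimensions, e.g. (2, 3) -> 6.
    return reduce(operator.mul, kernel_size)
```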
diff --git a/test/test_mps.py b/test/test_mps.py
index cf628a7..1e704f2 100644
--- a/test/test_mps.py
+++ b/test/test_mps.py
@@ -1389,11 +1389,11 @@
return joined_x.view(1, joined_x.numel())
def _avg_pool2d(self, x, kernel_size):
- size = reduce(operator.mul, kernel_size)
+ size = reduce(operator.mul, kernel_size) # noqa: F821
return self._sum_pool2d(x, kernel_size) / size
def _avg_pool3d(self, x, kernel_size):
- size = reduce(operator.mul, kernel_size)
+ size = reduce(operator.mul, kernel_size) # noqa: F821
return self._sum_pool3d(x, kernel_size) / size
def test_avg_pool2d_with_zero_divisor(self):
@@ -6520,7 +6520,7 @@
devices += ['mps']
def _gelu_ref(X):
- return X * stats.norm.cdf(X)
+ return X * stats.norm.cdf(X) # noqa: F821
for d in devices:
X = torch.rand(n, m, dtype=dtype, requires_grad=True, device=d)[:, ::2]
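For `_gelu_ref` in the second hunk, the undefined name is `stats`. A hedged sketch of the likely import, assuming the reference implementation is meant to use SciPy's standard normal CDF (an assumption; the PR leaves the `noqa` because the intended source of `stats` is not confirmed, and the NumPy-based helper below is illustrative only):

```python
# Sketch only, not part of this PR: if `stats` is meant to be scipy.stats,
# importing it would satisfy F821 without a noqa.
import numpy as np
from scipy import stats

def gelu_ref(x: np.ndarray) -> np.ndarray:
    # Exact GELU reference: x * Phi(x), where Phi is the standard normal CDF.
    return x * stats.norm.cdf(x)

print(gelu_ref(np.array([-1.0, 0.0, 1.0])))
```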