
Commit d1c4e6b

Skylion007 authored and pytorchmergebot committed
[BE]: Enable a few additional ruff rules (pytorch#130700)
Enables a few extra ruff rules, most of which have no violations because earlier PRs already cleaned them up; this change just turns them on so they are enforced going forward. Adds one noqa, since the suboptimal lambda generation + call is intentionally kept as a test case, and also enables the new SIM911 check in flake8.

Pull Request resolved: pytorch#130700
Approved by: https://github.com/justinchuby, https://github.com/ezyang
1 parent c24c50d commit d1c4e6b

File tree

5 files changed, +6 -4 lines changed


.flake8

+1 -1

@@ -2,7 +2,7 @@
 # NOTE: **Mirror any changes** to this file the [tool.ruff] config in pyproject.toml
 # before we can fully move to use ruff
 enable-extensions = G
-select = B,C,E,F,G,P,SIM1,T4,W,B9,TOR0,TOR1,TOR2,TOR9
+select = B,C,E,F,G,P,SIM1,SIM911,T4,W,B9,TOR0,TOR1,TOR2,TOR9
 max-line-length = 120
 # C408 ignored because we like the dict keyword argument syntax
 # E501 is not flexible enough, we're using B950 instead
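For reference, SIM911 is flake8-simplify's zip-dict-keys-and-values check: it flags code that zips a dict's .keys() and .values() back together when .items() already yields the same pairs. A minimal sketch of the pattern (illustrative only, not code from this commit):

params = {"lr": 0.1, "momentum": 0.9}

# Flagged by SIM911: re-pairing keys and values the dict already keeps together.
for name, value in zip(params.keys(), params.values()):
    print(name, value)

# Suggested form: iterate the items directly.
for name, value in params.items():
    print(name, value)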

pyproject.toml

+3

@@ -95,6 +95,7 @@ select = [
     "EXE",
     "F",
     "SIM1",
+    "SIM911",
     "W",
     # Not included in flake8
     "FURB",
@@ -110,6 +111,7 @@ select = [
     "PLC0131", # type bivariance
     "PLC0132", # type param mismatch
     "PLC0205", # string as __slots__
+    "PLC3002", # unnecessary-direct-lambda-call
     "PLE",
     "PLR0133", # constant comparison
     "PLR0206", # property with params
@@ -137,6 +139,7 @@ select = [
     "RUF016", # type error non-integer index
     "RUF017",
     "RUF018", # no assignment in assert
+    "RUF019", # unnecessary-key-check
     "RUF024", # from keys mutable
     "RUF026", # default factory kwarg
     "TCH",

test/jit/test_tracer.py

+1 -1

@@ -1482,7 +1482,7 @@ def addTwo(self, x):
         return x + 2

     def forward(self, input):
-        return (lambda a: a + 1)(input)
+        return (lambda a: a + 1)(input)  # noqa: PLC3002

     # When tracing Bar as a submodule, we only want to script the
     # exported methods, and we want to keep the forwards still
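PLC3002 (unnecessary-direct-lambda-call) would normally want the lambda inlined, but this tracer test exists precisely to exercise a directly called lambda, hence the noqa instead of a rewrite. Outside a test like this, the rule's complaint looks roughly like the following sketch:

x = 41

# Flagged by PLC3002: defining a lambda only to call it immediately.
y = (lambda a: a + 1)(x)

# Suggested form: just write the expression.
y = x + 1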

test/test_linalg.py

-1

@@ -3696,7 +3696,6 @@ def test_linalg_qr_autograd_errors(self, device, dtype):
         with self.assertRaisesRegex(RuntimeError,
                                     "The derivative of linalg.qr depends on Q"):
             b.backward()
-        #
         inp = torch.randn((7, 5), device=device, dtype=dtype, requires_grad=True)
         q, r = torch.linalg.qr(inp, mode='complete')
         b = torch.sum(r)

torch/_dynamo/trace_rules.py

+1 -1

@@ -281,7 +281,7 @@
     "torch._functorch.deprecated.grad": UserFunctionVariable,
     "torch._functorch.deprecated.grad_and_value": UserFunctionVariable,
     "torch._functorch.deprecated.vjp": UserFunctionVariable,
-    #
+    # everything else
    "torch._constrain_as_size": UserFunctionVariable,
     "torch._tensor._convert": UserFunctionVariable,
     "torch.jit._unwrap_optional": UserFunctionVariable,
