fix: Flake8-BugBear code B-026 for PyTorch (pytorch#111362)
Fixes pytorch#106571

I have fixed the B-026 error code violations flagged by the Flake8 checks on the codebase. Please review and let me know if anything else needs to be done.
Thanks, and I'm excited to make my first contribution to PyTorch.

I also refer to the issue that introduced [B-026](PyCQA/flake8-bugbear#286) in `flake8-bugbear`, which discusses the error code.
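
For reviewers unfamiliar with the rule: B-026 flags calls that place star-arg unpacking after a keyword argument. A minimal sketch with a made-up function (not PyTorch code) of the flagged pattern and the preferred spelling:

```python
def configure(name, *flags, verbose=False):
    # Hypothetical helper used only to illustrate the B-026 pattern.
    return name, flags, verbose


extra = ("--fast", "--quiet")

# Flagged by B-026: the keyword argument is written before the *args
# unpacking, even though the unpacked values still bind positionally.
configure("build", verbose=True, *extra)

# Preferred spelling (what this PR switches to): unpack the positional
# arguments first, then pass the keyword argument.
configure("build", *extra, verbose=True)
```

Both calls bind the arguments identically; the rule is about readability, not behavior.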
Pull Request resolved: pytorch#111362
Approved by: https://github.com/Skylion007
guptaaryan16 authored and pytorchmergebot committed Nov 7, 2023
1 parent 2da062d commit 8cee0a2
Showing 8 changed files with 9 additions and 9 deletions.
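
Every hunk below applies the same mechanical reordering: star-arg unpacking is moved ahead of the keyword arguments. As a quick, self-contained sanity check (the function and argument names here are illustrative, not taken from the files being changed), the two spellings bind identically:

```python
def _maybe_wrap(module, *fsdp_args, wrap_fsdp=False, **fsdp_kwargs):
    # Illustrative stand-in for the wrappers touched in this commit.
    return module, fsdp_args, wrap_fsdp, fsdp_kwargs


# Old spelling (keyword before the star-unpacking) vs. new spelling.
before = _maybe_wrap("layer", wrap_fsdp=True, *("a", "b"), x=1)
after = _maybe_wrap("layer", *("a", "b"), wrap_fsdp=True, x=1)

assert before == after == ("layer", ("a", "b"), True, {"x": 1})
```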
2 changes: 1 addition & 1 deletion .flake8
@@ -14,7 +14,7 @@ ignore =
# to line this up with executable bit
EXE001,
# these ignores are from flake8-bugbear; please fix!
-B007,B008,B017,B019,B023,B026,B028,B903,B904,B905,B906,B907
+B007,B008,B017,B019,B023,B028,B903,B904,B905,B906,B907
# these ignores are from flake8-comprehensions; please fix!
C407,
# these ignores are from flake8-logging-format; please fix!
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -29,7 +29,7 @@ ignore = [
"B007", "B008", "B017",
"B018", # Useless expression
"B019",
"B023", "B026",
"B023",
"B028", # No explicit `stacklevel` keyword argument found
"B904",
"E402",
2 changes: 1 addition & 1 deletion test/distributed/fsdp/test_fsdp_checkpoint.py
@@ -100,7 +100,7 @@ def __init__(
l3 = ckpt_wrapper(l3)

fsdp_wrapper = partial(
-    _maybe_wrap_fsdp, wrap_fsdp=wrap_fsdp, *fsdp_args, **fsdp_kwargs
+    _maybe_wrap_fsdp, *fsdp_args, wrap_fsdp=wrap_fsdp, **fsdp_kwargs
)
self.ffn = nn.Sequential(
fsdp_wrapper(l1),
4 changes: 2 additions & 2 deletions test/distributed/fsdp/test_fsdp_state_dict.py
@@ -230,8 +230,8 @@ def _get_multibuffer_nested_model(
bn1 = checkpoint_wrapper(bn1)
lin2 = checkpoint_wrapper(lin2)
seq = nn.Sequential(
-    FSDP(lin1, mixed_precision=lin_mp, *fsdp_args, **fsdp_kwargs),
-    FSDP(bn1, mixed_precision=bn_mp, *fsdp_args, **fsdp_kwargs),
+    FSDP(lin1, *fsdp_args, mixed_precision=lin_mp, **fsdp_kwargs),
+    FSDP(bn1, *fsdp_args, mixed_precision=bn_mp, **fsdp_kwargs),
lin2,
)
if checkpoint_wrap:
2 changes: 1 addition & 1 deletion test/onnx/test_operators.py
@@ -48,7 +48,7 @@

def export_to_pbtxt(model, inputs, *args, **kwargs):
return torch.onnx.export_to_pretty_string(
-    model, inputs, google_printer=True, *args, **kwargs
+    model, inputs, *args, google_printer=True, **kwargs
)


2 changes: 1 addition & 1 deletion test/test_ops.py
@@ -986,7 +986,7 @@ def helper(with_out, expectFail, op_to_test, inputs, *args, **kwargs):
try:
if with_out:
out = torch.empty(0, dtype=torch.int32, device=device)
-    op_to_test(inputs, out=out, *args, **kwargs)
+    op_to_test(inputs, *args, out=out, **kwargs)
else:
out = op_to_test(inputs, *args, **kwargs)
self.assertFalse(expectFail)
2 changes: 1 addition & 1 deletion torch/_functorch/aot_autograd.py
@@ -4716,7 +4716,7 @@ def functional_call(named_params, named_buffers, *args, **kwargs):
named_buffers = dict(mod.named_buffers(remove_duplicate=False))
num_params_buffers = len(named_params) + len(named_buffers)
compiled_f = aot_function(
-    functional_call, num_params_buffers=num_params_buffers, *args, **kwargs
+    functional_call, *args, num_params_buffers=num_params_buffers, **kwargs
)

class AOTModule(nn.Module):
2 changes: 1 addition & 1 deletion torch/utils/data/datapipes/iter/combinatorics.py
@@ -40,7 +40,7 @@ def __init__(self,
self.sampler_args = () if sampler_args is None else sampler_args
self.sampler_kwargs = {} if sampler_kwargs is None else sampler_kwargs
# https://github.com/python/mypy/pull/9629 will solve
-    self.sampler = sampler(data_source=self.datapipe, *self.sampler_args, **self.sampler_kwargs)  # type: ignore[misc]
+    self.sampler = sampler(*self.sampler_args, data_source=self.datapipe, **self.sampler_kwargs)  # type: ignore[misc]

def __iter__(self) -> Iterator[T_co]:
return iter(self.sampler)
