
Commit eb75534

craymichael authored and facebook-github-bot committed
Clean up pyre-fixme/ignore comments that are no longer suppressing errors (meta-pytorch#1554)
Summary:
Pull Request resolved: meta-pytorch#1554

Clean up pyre-fixme/ignore comments that are no longer suppressing errors throughout the entire Captum library.

Reviewed By: sarahtranfb

Differential Revision: D74049476

fbshipit-source-id: bba23dbc91e317cb715b72537e97aa399781c8d0
1 parent 7749921 commit eb75534

File tree

18 files changed: +2 −49 lines changed
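For readers unfamiliar with the convention: a `pyre-fixme[N]` or `pyre-ignore[N]` comment suppresses one expected Pyre error (code N) on the line that follows it. Once the underlying code is fixed, the suppression stops matching any error and Pyre flags it as unused, which is what this commit cleans up. A minimal sketch of that lifecycle, using a hypothetical function rather than Captum code:

from typing import Optional

def scale(value: Optional[float], factor: float) -> float:
    # Before the fix, passing Optional[float] straight through needed a
    # suppression such as:
    #     # pyre-fixme[6]: For 1st argument expected `float` but got `Optional[float]`.
    # After narrowing the value as below, that comment no longer suppresses
    # anything and should be deleted; that is exactly the situation in this diff.
    narrowed: float = value if value is not None else 0.0
    return narrowed * factor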

captum/_utils/gradient.py

Lines changed: 0 additions & 3 deletions
@@ -682,8 +682,6 @@ def compute_layer_gradients_and_eval(
                " take gradient with respect to multiple outputs."
            )

-        # pyre-fixme[6]: For 2nd argument expected `Dict[Module, Dict[device,
-        #  typing.Tuple[Tensor, ...]]]` but got `Module`.
        device_ids = _extract_device_ids(forward_fn, saved_layer, device_ids)

        # Identifies correct device ordering based on device ids.
@@ -729,7 +727,6 @@ def compute_layer_gradients_and_eval(
                for layer_tensor in saved_layer[single_layer][device_id]
            )
            saved_grads = torch.autograd.grad(
-                # pyre-fixme[6]: For 1st argument expected `Tensor` but got `Module`.
                outputs=torch.unbind(output),
                inputs=grad_inputs,
                **grad_kwargs or {},
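The second hunk touches a torch.autograd.grad call whose outputs are unbound along the batch dimension. A self-contained sketch of that pattern, using toy tensors rather than Captum's saved-layer bookkeeping:

import torch

x = torch.randn(4, 3, requires_grad=True)
output = (x * 2).sum(dim=1)  # one scalar per example in the batch

# torch.unbind turns the shape-(4,) output into four scalar tensors, so a
# single autograd.grad call computes gradients for every example at once.
grads = torch.autograd.grad(outputs=torch.unbind(output), inputs=x)
print(grads[0].shape)  # torch.Size([4, 3])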

captum/_utils/models/linear_model/train.py

Lines changed: 0 additions & 1 deletion
@@ -275,7 +275,6 @@ def __init__(self, mean, std, n=None, eps: float = 1e-8) -> None:
        self.mean = mean
        # pyre-fixme[4]: Attribute must be annotated.
        self.std = std
-        # pyre-fixme[4]: Attribute must be annotated.
        self.eps = eps

    # pyre-fixme[3]: Return type must be annotated.
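The deleted line was a pyre-fixme[4] ("Attribute must be annotated"), which becomes unnecessary once Pyre can infer or read the attribute's type. A hedged sketch of both ways to satisfy the check (the class is illustrative, not Captum's normalizer):

import torch

class Normalizer:
    def __init__(self, mean: torch.Tensor, std: torch.Tensor, eps: float = 1e-8) -> None:
        self.mean = mean       # type inferred from the annotated parameter
        self.std = std
        self.eps: float = eps  # or annotate the attribute explicitly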

captum/attr/_core/integrated_gradients.py

Lines changed: 0 additions & 6 deletions
@@ -88,8 +88,6 @@ def attribute(
    ) -> Tuple[TensorOrTupleOfTensorsGeneric, Tensor]: ...

    @typing.overload
-    # pyre-fixme[43]: The implementation of `attribute` does not accept all possible
-    #  arguments of overload defined on line `82`.
    def attribute(
        self,
        inputs: TensorOrTupleOfTensorsGeneric,
@@ -262,14 +260,10 @@ def attribute(  # type: ignore
        # converting it into a tuple.
        is_inputs_tuple = _is_tuple(inputs)

-        # pyre-fixme[9]: inputs has type `TensorOrTupleOfTensorsGeneric`; used as
-        #  `Tuple[Tensor, ...]`.
        formatted_inputs, formatted_baselines = _format_input_baseline(
            inputs, baselines
        )

-        # pyre-fixme[6]: For 1st argument expected `Tuple[Tensor, ...]` but got
-        #  `TensorOrTupleOfTensorsGeneric`.
        _validate_input(formatted_inputs, formatted_baselines, n_steps, method)

        if internal_batch_size is not None:
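pyre-fixme[43] flags an @typing.overload whose arguments the implementation cannot accept; the comment is dead once the implementation signature covers every overload. A small self-contained example of a consistent overload pair, unrelated to IntegratedGradients' actual signatures:

import typing
from typing import Tuple, Union

@typing.overload
def double(x: int) -> int: ...
@typing.overload
def double(x: Tuple[int, ...]) -> Tuple[int, ...]: ...

def double(x: Union[int, Tuple[int, ...]]) -> Union[int, Tuple[int, ...]]:
    # The implementation accepts everything the overloads declare, so no
    # pyre-fixme[43] suppression is needed above either overload.
    if isinstance(x, tuple):
        return tuple(2 * v for v in x)
    return 2 * x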

captum/attr/_core/lime.py

Lines changed: 0 additions & 1 deletion
@@ -566,7 +566,6 @@ def _evaluate_batch(
    ) -> Tensor:
        model_out = _run_forward(
            self.forward_func,
-            # pyre-fixme[6]: For 1st argument expected `Sequence[Variable[TupleOrTens...
            _reduce_list(curr_model_inputs),
            expanded_target,
            expanded_additional_args,
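_reduce_list is Captum's helper for collapsing a list of per-sample inputs into one batched structure before the forward pass. A rough sketch of what such a reduction does for plain tensors (an assumption about the helper's behavior, not its actual implementation):

from typing import List
import torch
from torch import Tensor

def reduce_list(inputs: List[Tensor]) -> Tensor:
    # Concatenate per-sample tensors along the batch dimension.
    return torch.cat(inputs, dim=0)

batch = reduce_list([torch.randn(1, 3), torch.randn(1, 3)])
print(batch.shape)  # torch.Size([2, 3])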

captum/attr/_core/occlusion.py

Lines changed: 0 additions & 2 deletions
@@ -323,8 +323,6 @@ def _construct_ablated_input(
                torch.ones(1, dtype=torch.long, device=expanded_input.device)
                - input_mask
            ).to(expanded_input.dtype)
-            # pyre-fixme[58]: `*` is not supported for operand types `Union[None, float,
-            #  Tensor]` and `Tensor`.
        ) + (baseline * input_mask.to(expanded_input.dtype))
        return ablated_tensor, input_mask
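The removed pyre-fixme[58] guarded `baseline * input_mask`, which is ordinary broadcasting once `baseline` is known to be a float or Tensor. A toy reproduction of the ablation arithmetic with made-up shapes:

import torch

expanded_input = torch.arange(6.0).reshape(2, 3)
input_mask = torch.tensor([[0, 1, 0], [1, 0, 0]], dtype=torch.long)
baseline = 0.0

# Keep unmasked entries; replace masked entries with the baseline value.
ablated_tensor = expanded_input * (
    torch.ones(1, dtype=torch.long, device=expanded_input.device) - input_mask
).to(expanded_input.dtype) + baseline * input_mask.to(expanded_input.dtype)
print(ablated_tensor)  # masked positions are zeroed out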

captum/attr/_utils/attribution.py

Lines changed: 0 additions & 3 deletions
@@ -24,8 +24,6 @@
from torch.nn import Module


-# pyre-fixme[13]: Attribute `attribute` is never initialized.
-# pyre-fixme[13]: Attribute `compute_convergence_delta` is never initialized.
class Attribution:
    r"""
    All attribution algorithms extend this class. It enforces its child classes
@@ -471,7 +469,6 @@ def interpolate(
        return F.interpolate(layer_attribution, interpolate_dims, mode=interpolate_mode)


-# pyre-fixme[13]: Attribute `attribute` is never initialized.
# pyre-fixme[24]: Generic type `InternalAttribution` expects 1 type parameter.
class NeuronAttribution(InternalAttribution):
    r"""

captum/attr/_utils/lrp_rules.py

Lines changed: 0 additions & 8 deletions
@@ -68,7 +68,6 @@ def _backward_hook_input(grad):
            device = grad.device
            # pyre-fixme[16]: `PropagationRule` has no attribute `_has_single_input`.
            if self._has_single_input:
-                # pyre-fixme[16]: `PropagationRule` has no attribute `relevance_input`.
                self.relevance_input[device] = relevance.data
            else:
                cast(List[Tensor], self.relevance_input[device]).append(relevance.data)
@@ -82,14 +81,12 @@ def _backward_hook_input(grad):

    # pyre-fixme[3]: Return type must be annotated.
    def _create_backward_hook_output(self, outputs: torch.Tensor):
-        # pyre-fixme[53]: Captured variable `outputs` is not annotated.
        # pyre-fixme[3]: Return type must be annotated.
        # pyre-fixme[2]: Parameter must be annotated.
        def _backward_hook_output(grad):
            sign = torch.sign(outputs)
            sign[sign == 0] = 1
            relevance = grad / (outputs + sign * self.STABILITY_FACTOR)
-            # pyre-fixme[16]: `PropagationRule` has no attribute `relevance_output`.
            self.relevance_output[grad.device] = grad.data
            return relevance

@@ -138,7 +135,6 @@ class EpsilonRule(PropagationRule):
    """

    def __init__(self, epsilon: float = 1e-9) -> None:
-        # pyre-fixme[4]: Attribute must be annotated.
        self.STABILITY_FACTOR = epsilon

    # pyre-fixme[2]: Parameter must be annotated.
@@ -159,9 +155,7 @@ class GammaRule(PropagationRule):
    """

    def __init__(self, gamma: float = 0.25, set_bias_to_zero: bool = False) -> None:
-        # pyre-fixme[4]: Attribute must be annotated.
        self.gamma = gamma
-        # pyre-fixme[4]: Attribute must be annotated.
        self.set_bias_to_zero = set_bias_to_zero

    # pyre-fixme[2]: Parameter must be annotated.
@@ -188,7 +182,6 @@ class Alpha1_Beta0_Rule(PropagationRule):
    """

    def __init__(self, set_bias_to_zero: bool = False) -> None:
-        # pyre-fixme[4]: Attribute must be annotated.
        self.set_bias_to_zero = set_bias_to_zero

    # pyre-fixme[2]: Parameter must be annotated.
@@ -215,7 +208,6 @@ def _create_backward_hook_input(self, inputs):
        # pyre-fixme[3]: Return type must be annotated.
        # pyre-fixme[2]: Parameter must be annotated.
        def _backward_hook_input(grad):
-            # pyre-fixme[16]: `IdentityRule` has no attribute `relevance_output`.
            return self.relevance_output[grad.device]

        return _backward_hook_input
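The LRP rules install closures as autograd hooks; the stale fixmes referenced attributes that the type checker now recognizes on PropagationRule. A standalone sketch of the hook mechanics with a toy rule (not Captum's actual LRP implementation):

import torch

STABILITY_FACTOR = 1e-9
relevance_output = {}

def create_backward_hook_output(outputs: torch.Tensor):
    def backward_hook_output(grad: torch.Tensor) -> torch.Tensor:
        sign = torch.sign(outputs)
        sign[sign == 0] = 1
        # Stabilized division, mirroring the rule bodies in the diff above.
        relevance = grad / (outputs + sign * STABILITY_FACTOR)
        relevance_output[grad.device] = grad.data
        return relevance
    return backward_hook_output

x = torch.randn(3, requires_grad=True)
y = x * 2
y.register_hook(create_backward_hook_output(y.detach()))
y.sum().backward()  # x.grad now reflects the rule-modified gradient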

captum/concept/_core/concept.py

Lines changed: 0 additions & 1 deletion
@@ -58,7 +58,6 @@ def __repr__(self) -> str:
        return "Concept(%r, %r)" % (self.id, self.name)


-# pyre-fixme[13]: Attribute `interpret` is never initialized.
class ConceptInterpreter:
    r"""
    An abstract class that exposes an abstract interpret method

captum/insights/example.py

Lines changed: 0 additions & 1 deletion
@@ -3,7 +3,6 @@
# pyre-strict
import warnings

-# pyre-fixme[21]: Could not find name `Net` in `captum.insights.attr_vis.example`.
from captum.insights.attr_vis.example import *  # noqa

warnings.warn(
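captum/insights/example.py is a thin deprecation shim: it re-exports everything from the module's new location and warns on import. A generic sketch of the idiom, with placeholder module names rather than Captum's real paths:

# old_location.py, kept only for backward compatibility.
import warnings

from new_location import *  # noqa: F401,F403

warnings.warn(
    "Importing from old_location is deprecated; import from new_location instead.",
    DeprecationWarning,
    stacklevel=2,
)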

captum/robust/_core/fgsm.py

Lines changed: 0 additions & 2 deletions
@@ -156,8 +156,6 @@ def perturb(
            is returned. If a tuple is provided for inputs, a tuple of
            corresponding sized tensors is returned.
        """
-        # pyre-fixme[6]: For 1st argument expected `Tensor` but got
-        #  `TensorOrTupleOfTensorsGeneric`.
        is_inputs_tuple = _is_tuple(inputs)
        # pyre-fixme[35]: Target cannot be annotated.
        inputs: Tuple[Tensor, ...] = _format_tensor_into_tuples(inputs)
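perturb first normalizes its inputs to a tuple, remembers the original container shape, and restores it on return. A minimal sketch of that idiom; the helper names imitate Captum's private utilities and are written out here as assumptions:

from typing import Tuple, Union
import torch
from torch import Tensor

TensorOrTuple = Union[Tensor, Tuple[Tensor, ...]]

def is_tuple(inputs: TensorOrTuple) -> bool:
    return isinstance(inputs, tuple)

def format_tensor_into_tuples(inputs: TensorOrTuple) -> Tuple[Tensor, ...]:
    return inputs if isinstance(inputs, tuple) else (inputs,)

def perturb(inputs: TensorOrTuple, epsilon: float) -> TensorOrTuple:
    was_tuple = is_tuple(inputs)
    formatted = format_tensor_into_tuples(inputs)
    # A random sign stands in for FGSM's gradient sign in this sketch.
    perturbed = tuple(t + epsilon * torch.sign(torch.randn_like(t)) for t in formatted)
    # Return the same container shape the caller passed in.
    return perturbed if was_tuple else perturbed[0]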

0 commit comments