@@ -4,9 +4,6 @@
 import functools
 import json
 import os
-# torch.compile needs typing.List. It will fail torch.library.infer_schema
-# otherwise
-from typing import List  # noqa: UP035
 from typing import Any, Callable, Optional, Union

 import torch
@@ -1229,7 +1226,7 @@ def inplace_fused_experts( |
     w2_zp: Optional[torch.Tensor] = None,
     a1_scale: Optional[torch.Tensor] = None,
     a2_scale: Optional[torch.Tensor] = None,
-    block_shape: Optional[List[int]] = None, #noqa: UP006
+    block_shape: Optional[list[int]] = None,
     w1_bias: Optional[torch.Tensor] = None,
     w2_bias: Optional[torch.Tensor] = None,
 ) -> None:
@@ -1263,7 +1260,7 @@ def inplace_fused_experts_fake( |
     w2_zp: Optional[torch.Tensor] = None,
     a1_scale: Optional[torch.Tensor] = None,
     a2_scale: Optional[torch.Tensor] = None,
-    block_shape: Optional[List[int]] = None, #noqa: UP006
+    block_shape: Optional[list[int]] = None,
     w1_bias: Optional[torch.Tensor] = None,
     w2_bias: Optional[torch.Tensor] = None,
 ) -> None:
@@ -1302,7 +1299,7 @@ def outplace_fused_experts( |
     w2_zp: Optional[torch.Tensor] = None,
     a1_scale: Optional[torch.Tensor] = None,
     a2_scale: Optional[torch.Tensor] = None,
-    block_shape: Optional[List[int]] = None, #noqa: UP006
+    block_shape: Optional[list[int]] = None,
     w1_bias: Optional[torch.Tensor] = None,
     w2_bias: Optional[torch.Tensor] = None,
 ) -> torch.Tensor:
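Below is a minimal, hypothetical sketch (not part of the PR) of why the `typing.List` import and its guard comment can be dropped: on sufficiently recent PyTorch, `torch.library.infer_schema` also accepts the builtin-generic `list[int]` annotation used in the new signatures. The toy function and its name are assumptions; only the `block_shape` annotation mirrors the diff.

```python
# Hypothetical toy function, not from vLLM: it only mirrors the
# `block_shape: Optional[list[int]]` annotation changed in this diff.
from typing import Optional

import torch


def _toy_fused_experts(
    hidden_states: torch.Tensor,
    block_shape: Optional[list[int]] = None,
) -> torch.Tensor:
    # Stand-in body; the real kernels dispatch to fused MoE implementations.
    return hidden_states


# On PyTorch versions where torch.library.infer_schema understands builtin
# generics, this succeeds with the plain `list[int]` annotation; older
# releases only understood typing.List, which is what the removed comment
# was guarding against.
print(torch.library.infer_schema(_toy_fused_experts, mutates_args=()))
```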