Skip to content

Commit c595bd3

Browse files
committed
fix lint
1 parent a64b139 commit c595bd3

File tree

3 files changed

+8
-6
lines changed

3 files changed

+8
-6
lines changed

setup.py

Lines changed: 4 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -283,8 +283,9 @@ def get_flash_attention3_nvcc_archs_flags(cuda_version: int):
283283
return []
284284
if cuda_version < 1203:
285285
return []
286-
if ((sys.platform == "win32" or platform.system() == "Windows")
287-
and cuda_version >= 1300):
286+
if (
287+
sys.platform == "win32" or platform.system() == "Windows"
288+
) and cuda_version >= 1300:
288289
return []
289290
archs_list = os.environ.get("TORCH_CUDA_ARCH_LIST")
290291
if archs_list is None:
@@ -503,9 +504,7 @@ def get_extensions():
503504
if cuda_version < 1205:
504505
# swiglu_fairinternal.cu uses cuda::ptx::cp_async_bulk which requires
505506
# CUDA 12.5
506-
sources.remove(
507-
os.path.join(extensions_dir, "swiglu_fairinternal.cu")
508-
)
507+
sources.remove(os.path.join(extensions_dir, "swiglu_fairinternal.cu"))
509508
include_dirs += [
510509
sputnik_dir,
511510
cutlass_dir,

tests/multiprocessing_utils.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -3,7 +3,7 @@
33
# This source code is licensed under the BSD license found in the
44
# LICENSE file in the root directory of this source tree.
55

6-
import concurrent
6+
import concurrent.futures
77
import gc
88
import multiprocessing
99
import os

xformers/ops/fmha/ck.py

Lines changed: 3 additions & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -82,6 +82,9 @@ def _get_seqlen_info(
8282

8383
if isinstance(attn_bias, PagedBlockDiagonalGappyKeysMask):
8484
seqstart_k = attn_bias.k_seqinfo.seqstart[:-1]
85+
assert (
86+
seqlen is not None
87+
), "seqlen must not be None for PagedBlockDiagonalGappyKeysMask"
8588
seqlen = seqlen - seqstart_k
8689

8790
return seqstart_k, seqstart_q, seqlen, max_seqlen_q, max_seqlen_k

0 commit comments

Comments (0)