gate sparsity tests by presence of cusparselt (#1602)
Summary:

I have a PyTorch build without `cuSparseLt`. This change adds logic to
properly skip the tests that depend on this library when it is not available.
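
For reference, a minimal sketch of the gating pattern (the test class and test
method below are hypothetical; the actual change threads the flag through
`get_quantization_functions` rather than using a decorator, as the diff shows):

```python
import unittest

import torch

# Guard the attribute lookup: older PyTorch builds do not expose
# torch.backends.cusparselt at all, so calling is_available() directly
# would raise AttributeError instead of returning False.
is_cusparselt_available = (
    hasattr(torch.backends, "cusparselt") and torch.backends.cusparselt.is_available()
)


class TestSparseApi(unittest.TestCase):
    # Hypothetical test: collected everywhere, but skipped on builds
    # where the cuSparseLt backend is missing.
    @unittest.skipIf(not is_cusparselt_available, "Need cuSparseLt available")
    def test_semi_structured_sparsity(self):
        ...
```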

Test Plan:

Local testing on an H100 without cuSparseLt:

```
pytest test/prototype/test_sparse_api.py -s
```

Reviewers:

Subscribers:

Tasks:

Tags:
vkuzo authored Jan 23, 2025
1 parent 166a357 commit 602ba86
Showing 1 changed file with 9 additions and 2 deletions.
11 changes: 9 additions & 2 deletions test/dtypes/test_affine_quantized.py

```diff
@@ -23,6 +23,10 @@
     is_sm_at_least_89,
 )
 
+is_cusparselt_available = (
+    hasattr(torch.backends, "cusparselt") and torch.backends.cusparselt.is_available()
+)
+
 
 def get_quantization_functions(
     do_sparse: bool, do_int4: bool, device: str = "cuda", int4_zp_int: bool = False
@@ -91,7 +95,8 @@ def test_tensor_core_layout_transpose(self):
 
     @unittest.skipIf(not torch.cuda.is_available(), "Need CUDA available")
     @common_utils.parametrize(
-        "apply_quant", get_quantization_functions(True, True, "cuda", True)
+        "apply_quant",
+        get_quantization_functions(is_cusparselt_available, True, "cuda", True),
     )
     def test_weights_only(self, apply_quant):
         linear = torch.nn.Linear(128, 256, dtype=torch.bfloat16, device="cuda")
@@ -168,7 +173,9 @@ def apply_uint6_weight_only_quant(linear):
 
         deregister_aqt_quantized_linear_dispatch(dispatch_condition)
 
-    @common_utils.parametrize("apply_quant", get_quantization_functions(True, True))
+    @common_utils.parametrize(
+        "apply_quant", get_quantization_functions(is_cusparselt_available, True)
+    )
     @unittest.skipIf(not torch.cuda.is_available(), "Need CUDA available")
     def test_print_quantized_module(self, apply_quant):
         linear = torch.nn.Linear(128, 256, dtype=torch.bfloat16, device="cuda")
```
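
Note the design choice: rather than decorating individual sparse tests with
`skipIf`, the change passes `is_cusparselt_available` as the `do_sparse`
argument of `get_quantization_functions`, so sparse quantization functions are
never added to the parametrization on builds without cuSparseLt and the
corresponding test cases are simply not generated.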
