From ae9cbf6405f56f9bb2e04d1f42b24ebf236d12b2 Mon Sep 17 00:00:00 2001
From: Farooq Khan <105917259+khanfs@users.noreply.github.com>
Date: Mon, 13 Apr 2026 16:06:36 +0100
Subject: [PATCH] test: add regression test for CPU autocast device-type behavior

---
 tests/test_autocast_precision.py | 29 +++++++++++++++++++++++++++++
 1 file changed, 29 insertions(+)
 create mode 100644 tests/test_autocast_precision.py

diff --git a/tests/test_autocast_precision.py b/tests/test_autocast_precision.py
new file mode 100644
index 000000000..2c1f69b39
--- /dev/null
+++ b/tests/test_autocast_precision.py
@@ -0,0 +1,29 @@
+import torch
+
+
+def test_cpu_autocast_disable_must_use_active_device_type():
+    """
+    Regression test for CPU autocast behavior.
+
+    Inside a CPU autocast context, disabling autocast with device_type="cuda"
+    does not actually disable CPU autocast. Disabling with device_type="cpu"
+    does.
+    """
+    with torch.autocast("cpu", dtype=torch.bfloat16):
+        x = torch.randn(4, 4)
+        y = torch.randn(4, 4)
+
+        # Wrong pattern: does not disable CPU autocast
+        with torch.autocast("cuda", enabled=False):
+            wrong = torch.matmul(x.float(), y.float())
+
+        # Correct pattern: disables CPU autocast
+        with torch.autocast("cpu", enabled=False):
+            correct = torch.matmul(x.float(), y.float())
+
+    assert wrong.dtype == torch.bfloat16, (
+        "Using device_type='cuda' should leave CPU autocast enabled in this context"
+    )
+    assert correct.dtype == torch.float32, (
+        "Using device_type='cpu' should disable CPU autocast in this context"
+    )