
akihironitta (Member)

Skips two test cases that fail due to pytorch/pytorch#130619:

__________________________ test_index_sort[device0] ___________________________

device = device(type='cpu')

    @pytest.mark.parametrize('device', DEVICES)
    def test_index_sort(device):
        inputs = torch.randperm(100_000, device=device)
        ref_sorted_input, ref_indices = torch.sort(inputs, stable=True)
        sorted_input, indices = pyg_lib.ops.index_sort(inputs)
>       assert torch.all(ref_sorted_input == sorted_input)
E       assert tensor(False)
E        +  where tensor(False) = <built-in method all of type object at 0x00007FFFB4FFEE80>(tensor([    0, 11966,  7382,  ..., 24002,  2670, 16435]) == tensor([    0,     1,     2,  ..., 99997, 99998, 99999])
E        +    where <built-in method all of type object at 0x00007FFFB4FFEE80> = torch.all
E           
E           Full diff:
E           - tensor([    0,     1,     2,  ..., 99997, 99998, 99999])
E           + tensor([    0, 11966,  7382,  ..., 24002,  2670, 16435]))

test\ops\test_index_sort.py:16: AssertionError
 ________________________ test_softmax_csr_autograd[0] _________________________

dim = 0

    @pytest.mark.parametrize('dim', list(range(3)))
    def test_softmax_csr_autograd(dim):
        sizes = (16, 32, 64)
        src1 = torch.rand(sizes, requires_grad=True)
        src2 = src1.detach().clone()
        src2.requires_grad = True
        dim_size = sizes[dim]
        ptr = torch.tensor([0, 1, 4, 5, dim_size - 1, dim_size])
        out_grad = torch.randn(sizes)
    
        expected_out = softmax_reference(src1, ptr, dim)
        out = pyg_lib.ops.softmax_csr(src2, ptr, dim)
>       assert torch.allclose(expected_out, out, atol=1e-6)
E       assert False
E        +  where False = <built-in method allclose of type object at 0x00007FFFB4FFEE80>(tensor([[[0.1250, 0.1250, 0.1250,  ..., 0.1250, 0.1250, 0.1250],\n         [0.1250, 0.1250, 0.1250,  ..., 0.1250, 0.1250, 0.1250],\n         [0.1250, 0.1250, 0.1250,  ..., 0.1250, 0.1250, 0.1250],\n         ...,\n         [0.1250, 0.1250, 0.1250,  ..., 0.1250, 0.1250, 0.1250],\n         [0.1250, 0.1250, 0.1250,  ..., 0.1250, 0.1250, 0.1250],\n         [0.1250, 0.1250, 0.1250,  ..., 0.1250, 0.1250, 0.1250]],\n\n        [[   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         ...,\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf]],\n\n        [[   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         ...,\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf],\n         [   inf,    inf,    inf,  ...,    inf,    inf,    inf]],\n\n        ...,\n\n        [[0.1112, 0.1154, 0.1628,  ..., 0.2388, 0.1397, 0.1385],\n         [0.2727, 0.1811, 0.1359,  ..., 0.1575, 0.1239, 0.1146],\n         [0.1527, 0.2101, 0.1511,  ..., 0.2917, 0.2265, 0.1673],\n         ...,\n         [0.1504, 0.0841, 0.2485,  ..., 0.1421, 0.1068, 0.1552],\n         [0.1738, 0.2317, 0.2033,  ..., 0.1912, 0.1105, 0.1442],\n         [0.1625, 0.2572, 0.1764,  ..., 0.1542, 0.1133, 0.1440]],\n\n        [[0.2061, 0.2132, 0.2313,  ..., 0.1056, 0.2529, 0.1111],\n         [0.2058, 0.1829, 0.2990,  ..., 0.1247, 0.1509, 0.2046],\n         [0.2379, 0.2679, 0.1090,  ..., 0.2855, 0.1633, 0.1737],\n         ...,\n         [0.1331, 0.1018, 0.1329,  ..., 0.1130, 0.1155, 0.1681],\n         [0.1935, 0.1238, 0.2346,  ..., 0.1846, 0.1407, 0.1730],\n         [0.2061, 0.1988, 0.2449,  ..., 0.1516, 0.1032, 0.1285]],\n\n        [[1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         ...,\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000]]],\n       grad_fn=<DivBackward0>), tensor([[[1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         ...,\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000]],\n\n        [[0.2417, 0.2579, 0.4961,  ..., 0.3641, 0.2242, 0.4179],\n         [0.3829, 0.3277, 0.3676,  ..., 0.3801, 0.2794, 0.2856],\n         [0.3129, 0.2644, 0.5144,  ..., 0.3335, 0.2922, 0.4699],\n         ...,\n         [0.3454, 0.3659, 0.3029,  ..., 0.4260, 0.2055, 0.5255],\n         [0.2670, 0.2713, 0.3273,  ..., 0.4415, 0.3480, 0.4439],\n         [0.3507, 0.2773, 0.3717,  ..., 0.4621, 0.3282, 0.4235]],\n\n        [[0.4173, 0.3118, 0.2460,  ..., 0.3144, 0.3545, 0.2162],\n         [0.4176, 0.2632, 0.2321,  ..., 
0.2850, 0.3394, 0.4088],\n         [0.2204, 0.2341, 0.2112,  ..., 0.4103, 0.4245, 0.3126],\n         ...,\n         [0.4126, 0.1978, 0.2525,  ..., 0.3116, 0.4507, 0.2546],\n         [0.3021, 0.3805, 0.3337,  ..., 0.2540, 0.2477, 0.2672],\n         [0.3875, 0.2745, 0.2679,  ..., 0.2447, 0.3154, 0.2707]],\n\n        ...,\n\n        [[0.0621, 0.0659, 0.0977,  ..., 0.1387, 0.0852, 0.0911],\n         [0.1352, 0.1012, 0.0754,  ..., 0.0990, 0.0800, 0.0710],\n         [0.0897, 0.1091, 0.0916,  ..., 0.1439, 0.1281, 0.1022],\n         ...,\n         [0.0973, 0.0588, 0.1368,  ..., 0.0836, 0.0691, 0.1009],\n         [0.1036, 0.1466, 0.1165,  ..., 0.1123, 0.0659, 0.0844],\n         [0.0937, 0.1372, 0.0980,  ..., 0.0893, 0.0755, 0.0829]],\n\n        [[0.1151, 0.1217, 0.1388,  ..., 0.0613, 0.1542, 0.0731],\n         [0.1021, 0.1022, 0.1658,  ..., 0.0784, 0.0975, 0.1266],\n         [0.1398, 0.1391, 0.0661,  ..., 0.1408, 0.0923, 0.1061],\n         ...,\n         [0.0861, 0.0712, 0.0732,  ..., 0.0665, 0.0748, 0.1093],\n         [0.1153, 0.0783, 0.1344,  ..., 0.1085, 0.0838, 0.1013],\n         [0.1187, 0.1060, 0.1361,  ..., 0.0879, 0.0688, 0.0740]],\n\n        [[1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         ...,\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000],\n         [1.0000, 1.0000, 1.0000,  ..., 1.0000, 1.0000, 1.0000]]],\n       grad_fn=<SoftmaxCSR>>), atol=1e-06)
E        +    where <built-in method allclose of type object at 0x00007FFFB4FFEE80> = torch.allclose

test\ops\test_softmax.py:47: AssertionError

akihironitta changed the title from "Skip a test case on PyTorch 2.4.0 on Windows" to "Skip test cases on CPU Windows due to PyTorch 2.4.0 bug" on Jun 15, 2025
akihironitta merged commit 383ee90 into master on Jun 15, 2025 (117 of 128 checks passed)
akihironitta deleted the aki/skip-pt24 branch on June 15, 2025 at 20:57