Skip to content

Commit

Permalink
Update skip list after reevaluating these failures (#678)
Browse files Browse the repository at this point in the history
Re-enable test_symnode_hashing and
test_cpu_gpu_parity_nn_CrossEntropyLoss_xpu_float16, as they now pass.

Co-authored-by: Huaiyu, Zheng <[email protected]>
Co-authored-by: Feng Yuan <[email protected]>
  • Loading branch information
3 people authored Aug 4, 2024
1 parent ba91ab9 commit d866d5f
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 11 deletions.
1 change: 1 addition & 0 deletions test/xpu/extended/run_test_with_skip.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"test_compare_cpu_tanh_xpu_complex128",
"test_compare_cpu_tanh_xpu_complex64",
"test_compare_cpu_rsqrt_xpu_bfloat16",
# cuda has the same issue on this case
"test_compare_cpu__refs_rsub_xpu_bfloat16",
"test_compare_cpu_add_xpu_bfloat16",
"test_compare_cpu_sub_xpu_bfloat16",
Expand Down
12 changes: 1 addition & 11 deletions test/xpu/run_test_with_skip.py
Original file line number Diff line number Diff line change
Expand Up @@ -1146,9 +1146,6 @@ def launch_test(test_case, skip_list=None, exe_list=None):
"test_memory_format_nn_ConvTranspose2d_xpu_float64",
"test_memory_format_nn_LazyConv2d_xpu_float64",
"test_memory_format_nn_LazyConvTranspose2d_xpu_float64",
# CPU fallback fails
# AssertionError: Tensor-likes are not close!
"test_cpu_gpu_parity_nn_CrossEntropyLoss_xpu_float16",
# CPU fallback could not cover these
# CUDA xfails
# Failed: Unexpected success
Expand Down Expand Up @@ -2979,14 +2976,7 @@ def launch_test(test_case, skip_list=None, exe_list=None):
res += launch_test("nn/test_convolution_xpu.py", skip_list)

# test_dynamic_shapes

skip_list = (
# Regression after PyTorch uplift
# https://github.com/intel/torch-xpu-ops/issues/549
# AssertionError: 3 != 3.0
"test_symnode_hashing",
)
res += launch_test("test_dynamic_shapes_xpu.py", skip_list)
res += launch_test("test_dynamic_shapes_xpu.py")

# test_load_state_dict

Expand Down

0 comments on commit d866d5f

Please sign in to comment.