diff --git a/build_tools/ci/test_posix.sh b/build_tools/ci/test_posix.sh
index 4e5ebc008dd0..3a9f5b7afa61 100755
--- a/build_tools/ci/test_posix.sh
+++ b/build_tools/ci/test_posix.sh
@@ -8,6 +8,10 @@ torch_version="${1:-unknown}"
 
 export PYTHONPATH="$repo_root/build/tools/torch-mlir/python_packages/torch_mlir:$repo_root/projects/pt1"
 
+echo "::group::Run ONNX e2e integration tests"
+python -m e2e_testing.main --config=onnx -v
+echo "::endgroup::"
+
 case $torch_version in
   nightly)
     # Failing with: NotImplementedError:
diff --git a/projects/pt1/e2e_testing/main.py b/projects/pt1/e2e_testing/main.py
index 952795d555bc..1261d74d73ff 100644
--- a/projects/pt1/e2e_testing/main.py
+++ b/projects/pt1/e2e_testing/main.py
@@ -20,6 +20,7 @@ from torch_mlir_e2e_test.configs import (
     LazyTensorCoreTestConfig,
     NativeTorchTestConfig,
+    OnnxBackendTestConfig,
     TorchScriptTestConfig,
     FxImporterTestConfig,
 )
 
@@ -70,6 +71,8 @@ def _get_argparse():
         "native_torch",
         "torchscript",
         "lazy_tensor_core",
+        "onnx",
+        "onnx_tosa",
         "fx_importer",
         "fx_importer_stablehlo",
         "fx_importer_tosa",
@@ -89,11 +92,11 @@
 "torchscript": compile the model to a torch.jit.ScriptModule, and then run that as-is (useful for verifying TorchScript is modeling the program correctly).
 "lazy_tensor_core": run the model through the Lazy Tensor Core frontend and execute the traced graph.
 # "torchdynamo": run the model through the TorchDynamo frontend and execute the graph using Linalg-on-Tensors.
-# "onnx": export to the model via onnx and reimport using the torch-onnx-to-torch path.
+"onnx": export to the model via onnx and reimport using the torch-onnx-to-torch path.
 "fx_importer": run the model through the fx importer frontend and execute the graph using Linalg-on-Tensors.
 "fx_importer_stablehlo": run the model through the fx importer frontend and execute the graph using Stablehlo backend.
 "fx_importer_tosa": run the model through the fx importer frontend and execute the graph using the TOSA backend.
-# "onnx_tosa": Import ONNX to Torch via the torch-onnx-to-torch path and execute the graph using the TOSA backend.
+"onnx_tosa": Import ONNX to Torch via the torch-onnx-to-torch path and execute the graph using the TOSA backend.
 """,
     )
     parser.add_argument(
diff --git a/projects/pt1/python/torch_mlir_e2e_test/configs/__init__.py b/projects/pt1/python/torch_mlir_e2e_test/configs/__init__.py
index daf712295e30..b0fcbd3264ca 100644
--- a/projects/pt1/python/torch_mlir_e2e_test/configs/__init__.py
+++ b/projects/pt1/python/torch_mlir_e2e_test/configs/__init__.py
@@ -5,5 +5,6 @@
 
 from .lazy_tensor_core import LazyTensorCoreTestConfig
 from .native_torch import NativeTorchTestConfig
+from .onnx_backend import OnnxBackendTestConfig
 from .torchscript import TorchScriptTestConfig
 from .fx_importer_backend import FxImporterTestConfig
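
Note: the configs enabled above can also be exercised locally through the same entry point the CI step invokes. A minimal sketch, assuming $repo_root points at a torch-mlir checkout that has already been built so the PYTHONPATH below exists, mirroring test_posix.sh:

    export PYTHONPATH="$repo_root/build/tools/torch-mlir/python_packages/torch_mlir:$repo_root/projects/pt1"
    # torch-onnx-to-torch import path, as run by the new CI step
    python -m e2e_testing.main --config=onnx -v
    # same import path, executed through the TOSA backend
    python -m e2e_testing.main --config=onnx_tosa -v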