From 1245b269cb63cc0f470667236f09e899dd35d01e Mon Sep 17 00:00:00 2001
From: Andrei-Aksionov <58434077+Andrei-Aksionov@users.noreply.github.com>
Date: Tue, 16 Apr 2024 19:28:35 +0300
Subject: [PATCH] Rerun test_tokenizer on fail (#1307)

---
 tests/test_tokenizer.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index f9aede3921..b7e4d0b33f 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -11,6 +11,7 @@ from litgpt.tokenizer import Tokenizer
 
 
 
+@pytest.mark.flaky(reruns=5)
 @pytest.mark.parametrize("config", config_module.configs, ids=[c["hf_config"]["name"] for c in config_module.configs])
 def test_tokenizer_against_hf(config):
     access_token = os.getenv("HF_TOKEN")
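
Note (not part of the patch): the flaky marker added above is presumably provided by the pytest-rerunfailures plugin; when that plugin is installed, a test that fails is rerun up to the given number of times and counts as passed if any attempt succeeds. Below is a minimal, self-contained sketch of that behavior; the test name and the random failure are hypothetical stand-ins for illustration, not code from litgpt.

    # sketch.py -- illustrative only; assumes pytest and pytest-rerunfailures are installed
    import random

    import pytest


    @pytest.mark.flaky(reruns=5)  # on failure, rerun up to 5 more times before reporting
    def test_sometimes_fails():
        # Fails roughly half the time on any single attempt; with up to 5 reruns,
        # the chance that every attempt fails drops to about 0.5**6 (~1.6%).
        assert random.random() < 0.5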