Skip to content

Commit

Permalink
SNOW-759276 Add xfail to known flaky tests (snowflakedb#724)
Browse files Browse the repository at this point in the history
* SNOW-759276 Add xfail to known flaky tests

Description

Add xfail to known flaky tests to make the merge gate stable; the
actual fixes will follow in separate PRs.

There are two tests that are treated differently:

1. Doctest snowpark.async_job.AsyncJob. I don't see a good reason
to keep the "two async runs are faster than two sync runs" comparison as a doc example.
2. Removed a stored-proc skip, because the underlying reason for the skip is test flakiness
(now covered by the xfail marker instead).
Testing

Existing tests

* add another flaky test
  • Loading branch information
sfc-gh-sfan authored Mar 14, 2023
1 parent 0843769 commit b861103
Show file tree
Hide file tree
Showing 5 changed files with 6 additions and 21 deletions.
19 changes: 0 additions & 19 deletions src/snowflake/snowpark/async_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,25 +125,6 @@ class AsyncJob:
>>> async_job.cancel()
Example 9
Executing two queries asynchronously is faster than executing two queries one by one::
>>> from time import time
>>> df1 = session.sql("select SYSTEM$WAIT(3)")
>>> df2 = session.sql("select SYSTEM$WAIT(3)")
>>> start = time()
>>> sync_res1 = df1.collect()
>>> sync_res2 = df2.collect()
>>> time1 = time() - start
>>> start = time()
>>> async_job1 = df1.collect_nowait()
>>> async_job2 = df2.collect_nowait()
>>> async_res1 = async_job1.result()
>>> async_res2 = async_job2.result()
>>> time2 = time() - start
>>> time2 < time1
True
Example 10
Creating an :class:`AsyncJob` from an existing query ID, retrieving results and converting it back to a :class:`DataFrame`:
>>> from snowflake.snowpark.functions import col
Expand Down
1 change: 1 addition & 0 deletions tests/integ/scala/test_async_job_suite.py
Original file line number Diff line number Diff line change
Expand Up @@ -396,6 +396,7 @@ def test_create_async_job_negative(session):
async_job.result()


@pytest.mark.xfail(reason="SNOW-754115 flaky test", strict=False)
@pytest.mark.parametrize("create_async_job_from_query_id", [True, False])
def test_get_query_from_async_job(session, create_async_job_from_query_id):
query_text = "select 1, 2, 3"
Expand Down
3 changes: 1 addition & 2 deletions tests/integ/scala/test_large_dataframe_suite.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,9 @@
TimeType,
VariantType,
)
from tests.utils import IS_IN_STORED_PROC


@pytest.mark.skipif(IS_IN_STORED_PROC, reason="flaky test in SP")
@pytest.mark.xfail(reason="SNOW-754118 flaky test", strict=False)
def test_to_local_iterator_should_not_load_all_data_at_once(session):
df = (
session.range(1000000)
Expand Down
3 changes: 3 additions & 0 deletions tests/integ/scala/test_query_tag_suite.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ def test_query_tags_in_session(session):
Utils.unset_query_tag(session)


@pytest.mark.xfail(reason="SNOW-754166 flaky test", strict=False)
@pytest.mark.parametrize(
"code",
[
Expand Down Expand Up @@ -96,6 +97,7 @@ def test_query_tags_from_trackback(session, code):
assert len(query_history) == 1


@pytest.mark.xfail(reason="SNOW-759410 flaky test", strict=False)
@pytest.mark.parametrize("data", ["a", "'a'", "\\a", "a\n", r"\ua", " a", '"a'])
def test_large_local_relation_query_tag_from_traceback(session, data):
session.create_dataframe(
Expand All @@ -107,6 +109,7 @@ def test_large_local_relation_query_tag_from_traceback(session, data):
assert len(query_history) > 0 # some hidden SQLs are run so it's not exactly 1.


@pytest.mark.xfail(reason="SNOW-754078 flaky test", strict=False)
def test_query_tag_for_cache_result(session):
query_tag = Utils.random_name_for_temp_object(TempObjectType.QUERY_TAG)
session.query_tag = query_tag
Expand Down
1 change: 1 addition & 0 deletions tests/integ/test_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -367,6 +367,7 @@ def test_use_negative_tests(session, obj):
assert err_msg in exec_info.value.args[0]


@pytest.mark.xfail(reason="SNOW-754082 flaky test", strict=False)
@pytest.mark.skipif(
IS_IN_STORED_PROC, reason="use schema is not allowed in stored proc (owner mode)"
)
Expand Down

0 comments on commit b861103

Please sign in to comment.