diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7f25a460747..bb43f15d200 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -54,6 +54,7 @@
 - Treat an empty `subset` (e.g. `[]`) as if it specified all columns instead of no columns.
 - Raise a `TypeError` for a scalar `subset` instead of filtering on just that column.
 - Raise a `ValueError` for a `subset` of type `pandas.Index` instead of filtering on the columns in the index.
+- Disable creation of scoped read only table to mitigate `TableNotFoundError` when using dynamic pivot in notebook environments.
 
 #### Improvements
 
 - Improve np.where with scalar x value by eliminating unnecessary join and temp table creation.
diff --git a/src/snowflake/snowpark/modin/plugin/_internal/utils.py b/src/snowflake/snowpark/modin/plugin/_internal/utils.py
index 1366a79918b..1c929b0de31 100644
--- a/src/snowflake/snowpark/modin/plugin/_internal/utils.py
+++ b/src/snowflake/snowpark/modin/plugin/_internal/utils.py
@@ -273,7 +273,7 @@ def _create_read_only_table(
     readonly_table_name = (
         f"{random_name_for_temp_object(TempObjectType.TABLE)}{READ_ONLY_TABLE_SUFFIX}"
     )
-    use_scoped_temp_table = session._use_scoped_temp_objects
+    use_scoped_temp_table = session._use_scoped_temp_read_only_table
     # If we need to materialize into a temp table our create table expression
     # needs to be SELECT * FROM (object).
     if materialize_into_temp_table:
diff --git a/src/snowflake/snowpark/session.py b/src/snowflake/snowpark/session.py
index 35e4a6855a1..d357b4f2c0a 100644
--- a/src/snowflake/snowpark/session.py
+++ b/src/snowflake/snowpark/session.py
@@ -238,6 +238,10 @@
 _PYTHON_SNOWPARK_ENABLE_THREAD_SAFE_SESSION = (
     "PYTHON_SNOWPARK_ENABLE_THREAD_SAFE_SESSION"
 )
+# Flag for controlling the usage of scoped temp read only table.
+_PYTHON_SNOWPARK_ENABLE_SCOPED_TEMP_READ_ONLY_TABLE = (
+    "PYTHON_SNOWPARK_ENABLE_SCOPED_TEMP_READ_ONLY_TABLE"
+)
 # The complexity score lower bound is set to match COMPILATION_MEMORY_LIMIT
 # in Snowflake. This is the limit where we start seeing compilation errors.
 DEFAULT_COMPLEXITY_SCORE_LOWER_BOUND = 10_000_000
@@ -541,6 +545,11 @@ def __init__(
                 _PYTHON_SNOWPARK_USE_SCOPED_TEMP_OBJECTS_STRING, True
             )
         )
+        self._use_scoped_temp_read_only_table: bool = (
+            self._conn._get_client_side_session_parameter(
+                _PYTHON_SNOWPARK_ENABLE_SCOPED_TEMP_READ_ONLY_TABLE, False
+            )
+        )
         self._file = FileOperation(self)
         self._lineage = Lineage(self)
         self._sql_simplifier_enabled: bool = (
diff --git a/tests/integ/modin/io/test_read_snowflake.py b/tests/integ/modin/io/test_read_snowflake.py
index 2e6f8cdf6e6..494755f84ee 100644
--- a/tests/integ/modin/io/test_read_snowflake.py
+++ b/tests/integ/modin/io/test_read_snowflake.py
@@ -39,10 +39,10 @@
 @pytest.fixture(params=paramList)
 def setup_use_scoped_object(request, session):
-    use_scoped_objects = session._use_scoped_temp_objects
-    session._use_scoped_temp_objects = request.param
+    use_scoped_objects = session._use_scoped_temp_read_only_table
+    session._use_scoped_temp_read_only_table = request.param
     yield
-    session._use_scoped_temp_objects = use_scoped_objects
+    session._use_scoped_temp_read_only_table = use_scoped_objects
 
 
 def read_snowflake_and_verify_snapshot_creation(
@@ -85,7 +85,7 @@ def read_snowflake_and_verify_snapshot_creation(
     assert len(query_history.queries) == 1
 
     # test if the scoped snapshot is created
-    scoped_pattern = " SCOPED " if session._use_scoped_temp_objects else " "
+    scoped_pattern = " SCOPED " if session._use_scoped_temp_read_only_table else " "
     table_create_sql = query_history.queries[-1].sql_text
     table_create_pattern = f"CREATE OR REPLACE{scoped_pattern}TEMPORARY READ ONLY TABLE SNOWPARK_TEMP_TABLE_[0-9A-Z]+.*{READ_ONLY_TABLE_SUFFIX}.*"
    assert re.match(table_create_pattern, table_create_sql) is not None
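
For context, a minimal sketch of how the new flag is expected to gate the SCOPED keyword in the generated DDL. This is not the actual implementation in _create_read_only_table; render_create_read_only_table_prefix is a hypothetical helper, and it models only the statement prefix asserted by the regex in the test above.

# Illustrative sketch only -- not the real logic in
# src/snowflake/snowpark/modin/plugin/_internal/utils.py. It mirrors the
# prefix asserted in tests/integ/modin/io/test_read_snowflake.py.
def render_create_read_only_table_prefix(
    readonly_table_name: str, use_scoped_temp_table: bool
) -> str:
    # With the new default (_use_scoped_temp_read_only_table = False), the
    # SCOPED keyword is omitted, which is what mitigates the TableNotFoundError
    # seen with dynamic pivot in notebook environments.
    scoped = " SCOPED " if use_scoped_temp_table else " "
    return f"CREATE OR REPLACE{scoped}TEMPORARY READ ONLY TABLE {readonly_table_name}"


assert (
    render_create_read_only_table_prefix("SNOWPARK_TEMP_TABLE_EXAMPLE", False)
    == "CREATE OR REPLACE TEMPORARY READ ONLY TABLE SNOWPARK_TEMP_TABLE_EXAMPLE"
)
assert (
    render_create_read_only_table_prefix("SNOWPARK_TEMP_TABLE_EXAMPLE", True)
    == "CREATE OR REPLACE SCOPED TEMPORARY READ ONLY TABLE SNOWPARK_TEMP_TABLE_EXAMPLE"
)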