diff --git a/pandas/tests/io/test_sql.py b/pandas/tests/io/test_sql.py index 020ed80c06427..ea5693e512f7f 100644 --- a/pandas/tests/io/test_sql.py +++ b/pandas/tests/io/test_sql.py @@ -59,13 +59,11 @@ pytestmark = [ pytest.mark.filterwarnings( "ignore:Passing a BlockManager to DataFrame:DeprecationWarning" - ), - pytest.mark.single_cpu, + ) ] -def table_uuid_gen(prefix: str) -> str: - """Generate a unique table name with context prefix.""" +def create_unique_table_name(prefix: str) -> str: return f"{prefix}_{uuid.uuid4().hex}" @@ -987,7 +985,7 @@ def sqlite_buildin_types(sqlite_buildin, types_data): @pytest.mark.parametrize("conn", all_connectable) def test_dataframe_to_sql(conn, test_frame1, request): # GH 51086 if conn is sqlite_engine - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") conn = request.getfixturevalue(conn) test_frame1.to_sql(name=table_uuid, con=conn, if_exists="append", index=False) @@ -1003,7 +1001,7 @@ def test_dataframe_to_sql_empty(conn, test_frame1, request): # GH 51086 if conn is sqlite_engine conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") empty_df = test_frame1.iloc[:0] empty_df.to_sql(name=table_uuid, con=conn, if_exists="append", index=False) @@ -1038,7 +1036,7 @@ def test_dataframe_to_sql_arrow_dtypes(conn, request): msg = "the 'timedelta'" conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_arrow") + table_uuid = create_unique_table_name("test_arrow") with tm.assert_produces_warning(exp_warning, match=msg, check_stacklevel=False): df.to_sql(name=table_uuid, con=conn, if_exists="replace", index=False) @@ -1055,7 +1053,7 @@ def test_dataframe_to_sql_arrow_dtypes_missing(conn, request, nulls_fixture): } ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_arrow") + table_uuid = create_unique_table_name("test_arrow") df.to_sql(name=table_uuid, con=conn, if_exists="replace", index=False) 
@@ -1070,7 +1068,7 @@ def test_to_sql(conn, method, test_frame1, request): ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") with pandasSQL_builder(conn, need_transaction=True) as pandasSQL: pandasSQL.to_sql(test_frame1, table_uuid, method=method) assert pandasSQL.has_table(table_uuid) @@ -1081,7 +1079,7 @@ def test_to_sql(conn, method, test_frame1, request): @pytest.mark.parametrize("mode, num_row_coef", [("replace", 1), ("append", 2)]) def test_to_sql_exist(conn, mode, num_row_coef, test_frame1, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") with pandasSQL_builder(conn, need_transaction=True) as pandasSQL: pandasSQL.to_sql(test_frame1, table_uuid, if_exists="fail") pandasSQL.to_sql(test_frame1, table_uuid, if_exists=mode) @@ -1092,7 +1090,7 @@ def test_to_sql_exist(conn, mode, num_row_coef, test_frame1, request): @pytest.mark.parametrize("conn", all_connectable) def test_to_sql_exist_fail(conn, test_frame1, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") with pandasSQL_builder(conn, need_transaction=True) as pandasSQL: pandasSQL.to_sql(test_frame1, table_uuid, if_exists="fail") assert pandasSQL.has_table(table_uuid) @@ -1207,7 +1205,7 @@ def test_read_iris_table_chunksize(conn, request): @pytest.mark.parametrize("conn", sqlalchemy_connectable) def test_to_sql_callable(conn, test_frame1, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") check = [] # used to double check function below is really being used @@ -1258,7 +1256,7 @@ def test_default_type_conversion(conn, request): @pytest.mark.parametrize("conn", mysql_connectable) def test_read_procedure(conn, request): conn = 
request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") # GH 7324 # Although it is more an api test, it is added to the @@ -1319,7 +1317,7 @@ def psql_insert_copy(table, conn, keys, data_iter): return expected_count conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") expected = DataFrame({"col1": [1, 2], "col2": [0.1, 0.2], "col3": ["a", "n"]}) result_count = expected.to_sql( name=table_uuid, con=conn, index=False, method=psql_insert_copy @@ -1337,7 +1335,7 @@ def psql_insert_copy(table, conn, keys, data_iter): def test_insertion_method_on_conflict_do_nothing(conn, request): # GH 15988: Example in to_sql docstring conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_insert_conflict") + table_uuid = create_unique_table_name("test_insert_conflict") from sqlalchemy.dialects.postgresql import insert from sqlalchemy.engine import Engine @@ -1400,7 +1398,7 @@ def test_to_sql_on_public_schema(conn, request): ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_public_schema") + table_uuid = create_unique_table_name("test_public_schema") test_data = DataFrame([[1, 2.1, "a"], [2, 3.1, "b"]], columns=list("abc")) test_data.to_sql( @@ -1419,7 +1417,7 @@ def test_to_sql_on_public_schema(conn, request): def test_insertion_method_on_conflict_update(conn, request): # GH 14553: Example in to_sql docstring conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_insert_conflict") + table_uuid = create_unique_table_name("test_insert_conflict") from sqlalchemy.dialects.mysql import insert from sqlalchemy.engine import Engine @@ -1477,8 +1475,8 @@ def test_read_view_postgres(conn, request): from sqlalchemy.engine import Engine from sqlalchemy.sql import text - table_name = table_uuid_gen("group") - view_name = table_uuid_gen("group_view") + table_name = 
create_unique_table_name("group") + view_name = create_unique_table_name("group_view") sql_stmt = text( f""" @@ -1507,8 +1505,8 @@ def test_read_view_postgres(conn, request): def test_read_view_sqlite(sqlite_buildin): # GH 52969 - table_uuid = table_uuid_gen("groups") - view_uuid = table_uuid_gen("group_view") + table_uuid = create_unique_table_name("groups") + view_uuid = create_unique_table_name("group_view") create_table = f""" CREATE TABLE {table_uuid} ( @@ -1625,7 +1623,7 @@ def test_api_read_sql_with_chunksize_no_result(conn, request): @pytest.mark.parametrize("conn", all_connectable) def test_api_to_sql(conn, request, test_frame1): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame1") + table_uuid = create_unique_table_name("test_frame1") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1637,7 +1635,7 @@ def test_api_to_sql(conn, request, test_frame1): @pytest.mark.parametrize("conn", all_connectable) def test_api_to_sql_fail(conn, request, test_frame1): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame2") + table_uuid = create_unique_table_name("test_frame2") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1653,7 +1651,7 @@ def test_api_to_sql_fail(conn, request, test_frame1): @pytest.mark.parametrize("conn", all_connectable) def test_api_to_sql_replace(conn, request, test_frame1): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame3") + table_uuid = create_unique_table_name("test_frame3") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1672,7 +1670,7 @@ def test_api_to_sql_replace(conn, request, test_frame1): @pytest.mark.parametrize("conn", all_connectable) def test_api_to_sql_append(conn, request, test_frame1): 
conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame4") + table_uuid = create_unique_table_name("test_frame4") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1692,7 +1690,7 @@ def test_api_to_sql_append(conn, request, test_frame1): @pytest.mark.parametrize("conn", all_connectable) def test_api_to_sql_type_mapping(conn, request, test_frame3): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame5") + table_uuid = create_unique_table_name("test_frame5") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1706,7 +1704,7 @@ def test_api_to_sql_type_mapping(conn, request, test_frame3): @pytest.mark.parametrize("conn", all_connectable) def test_api_to_sql_series(conn, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_series") + table_uuid = create_unique_table_name("test_series") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1721,7 +1719,7 @@ def test_api_to_sql_series(conn, request): def test_api_roundtrip(conn, request, test_frame1): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame_roundtrip") + table_uuid = create_unique_table_name("test_frame_roundtrip") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1744,7 +1742,7 @@ def test_api_roundtrip_chunksize(conn, request, test_frame1): pytest.mark.xfail(reason="chunksize argument NotImplemented with ADBC") ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame_roundtrip") + table_uuid = create_unique_table_name("test_frame_roundtrip") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) 
as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1904,7 +1902,7 @@ def test_api_timedelta(conn, request): # see #6921 conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_timedelta") + table_uuid = create_unique_table_name("test_timedelta") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -1951,7 +1949,7 @@ def test_api_timedelta(conn, request): def test_api_complex_raises(conn, request): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_complex") + table_uuid = create_unique_table_name("test_complex") df = DataFrame({"a": [1 + 1j, 2j]}) if "adbc" in conn_name: @@ -1986,7 +1984,7 @@ def test_api_to_sql_index_label(conn, request, index_name, index_label, expected pytest.mark.xfail(reason="index_label argument NotImplemented with ADBC") ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_index_label") + table_uuid = create_unique_table_name("test_index_label") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2014,7 +2012,7 @@ def test_api_to_sql_index_label_multiindex(conn, request): ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_index_label") + table_uuid = create_unique_table_name("test_index_label") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2077,7 +2075,7 @@ def test_api_to_sql_index_label_multiindex(conn, request): @pytest.mark.parametrize("conn", all_connectable) def test_api_multiindex_roundtrip(conn, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_multiindex_roundtrip") + table_uuid = create_unique_table_name("test_multiindex_roundtrip") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as 
pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2109,7 +2107,7 @@ def test_api_dtype_argument(conn, request, dtype): # GH10285 Add dtype argument to read_sql_query conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_dtype_argument") + table_uuid = create_unique_table_name("test_dtype_argument") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2131,7 +2129,7 @@ def test_api_dtype_argument(conn, request, dtype): @pytest.mark.parametrize("conn", all_connectable) def test_api_integer_col_names(conn, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame_integer_col_names") + table_uuid = create_unique_table_name("test_frame_integer_col_names") df = DataFrame([[1, 2], [3, 4]], columns=[0, 1]) sql.to_sql(df, table_uuid, conn, if_exists="replace") @@ -2146,7 +2144,7 @@ def test_api_get_schema(conn, request, test_frame1): ) ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = sql.get_schema(test_frame1, table_uuid, con=conn) assert "CREATE" in create_sql @@ -2162,7 +2160,7 @@ def test_api_get_schema_with_schema(conn, request, test_frame1): ) ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = sql.get_schema(test_frame1, table_uuid, con=conn, schema="pypi") assert "CREATE TABLE pypi." 
in create_sql @@ -2186,7 +2184,7 @@ def test_api_get_schema_dtypes(conn, request): from sqlalchemy import Integer dtype = Integer - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = sql.get_schema(float_frame, table_uuid, con=conn, dtype={"b": dtype}) assert "CREATE" in create_sql assert "INTEGER" in create_sql @@ -2204,7 +2202,7 @@ def test_api_get_schema_keys(conn, request, test_frame1): conn_name = conn conn = request.getfixturevalue(conn) frame = DataFrame({"Col1": [1.1, 1.2], "Col2": [2.1, 2.2]}) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = sql.get_schema(frame, table_uuid, con=conn, keys="Col1") if "mysql" in conn_name: @@ -2230,7 +2228,7 @@ def test_api_chunksize_read(conn, request): ) conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_chunksize") + table_uuid = create_unique_table_name("test_chunksize") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2286,7 +2284,7 @@ def test_api_categorical(conn, request): # GH8624 # test that categorical gets written correctly as dense column conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_categorical") + table_uuid = create_unique_table_name("test_categorical") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2310,7 +2308,7 @@ def test_api_categorical(conn, request): def test_api_unicode_column_name(conn, request): # GH 11431 conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_unicode") + table_uuid = create_unique_table_name("test_unicode") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2324,7 +2322,7 @@ def test_api_escaped_table_name(conn, request): # GH 13206 conn_name = 
conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("d1187b08-4943-4c8d-a7f6") + table_uuid = create_unique_table_name("d1187b08-4943-4c8d-a7f6") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2356,7 +2354,7 @@ def test_api_read_sql_duplicate_columns(conn, request): ) ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_table") + table_uuid = create_unique_table_name("test_table") if sql.has_table(table_uuid, conn): with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: pandasSQL.drop_table(table_uuid) @@ -2380,7 +2378,7 @@ def test_read_table_columns(conn, request, test_frame1): request.applymarker(pytest.mark.xfail(reason="Not Implemented")) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") sql.to_sql(test_frame1, table_uuid, conn) cols = ["A", "B"] @@ -2397,7 +2395,7 @@ def test_read_table_index_col(conn, request, test_frame1): request.applymarker(pytest.mark.xfail(reason="Not Implemented")) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") sql.to_sql(test_frame1, table_uuid, conn) result = sql.read_sql_table(table_uuid, conn, index_col="index") @@ -2437,8 +2435,8 @@ def test_not_reflect_all_tables(sqlite_conn): from sqlalchemy import text from sqlalchemy.engine import Engine - invalid_uuid = table_uuid_gen("invalid") - other_uuid = table_uuid_gen("other_table") + invalid_uuid = create_unique_table_name("invalid") + other_uuid = create_unique_table_name("other_table") # create invalid table query_list = [ @@ -2467,7 +2465,7 @@ def test_warning_case_insensitive_table_name(conn, request, test_frame1): request.applymarker(pytest.mark.xfail(reason="Does not raise warning")) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("table") + table_uuid 
= create_unique_table_name("table") table_uuid_upper = table_uuid.upper() # see gh-7815 with tm.assert_produces_warning( @@ -2483,7 +2481,7 @@ def test_warning_case_insensitive_table_name(conn, request, test_frame1): # Test that the warning is certainly NOT triggered in a normal case. with tm.assert_produces_warning(None): - case_sensitive_uuid = table_uuid_gen("CaseSensitive") + case_sensitive_uuid = create_unique_table_name("CaseSensitive") test_frame1.to_sql(name=case_sensitive_uuid, con=conn) @@ -2496,7 +2494,7 @@ def test_sqlalchemy_type_mapping(conn, request): df = DataFrame( {"time": to_datetime(["2014-12-12 01:54", "2014-12-11 02:54"], utc=True)} ) - table_uuid = table_uuid_gen("test_type") + table_uuid = create_unique_table_name("test_type") with sql.SQLDatabase(conn) as db: table = sql.SQLTable(table_uuid, db, frame=df) # GH 9086: TIMESTAMP is the suggested type for datetimes with timezones @@ -2528,7 +2526,7 @@ def test_sqlalchemy_integer_mapping(conn, request, integer, expected): # GH35076 Map pandas integer to optimal SQLAlchemy integer type conn = request.getfixturevalue(conn) df = DataFrame([0, 1], columns=["a"], dtype=integer) - table_uuid = table_uuid_gen("test_type") + table_uuid = create_unique_table_name("test_type") with sql.SQLDatabase(conn) as db: table = sql.SQLTable(table_uuid, db, frame=df) @@ -2542,7 +2540,7 @@ def test_sqlalchemy_integer_overload_mapping(conn, request, integer): conn = request.getfixturevalue(conn) # GH35076 Map pandas integer to optimal SQLAlchemy integer type df = DataFrame([0, 1], columns=["a"], dtype=integer) - table_uuid = table_uuid_gen("test_type") + table_uuid = create_unique_table_name("test_type") with sql.SQLDatabase(conn) as db: with pytest.raises( ValueError, match="Unsigned 64 bit integer datatype is not supported" @@ -2560,7 +2558,7 @@ def test_database_uri_string(conn, request, test_frame1): # "iris": syntax error [SQL: 'iris'] with tm.ensure_clean() as name: db_uri = "sqlite:///" + name - table_uuid = 
table_uuid_gen("iris") + table_uuid = create_unique_table_name("iris") test_frame1.to_sql( name=table_uuid, con=db_uri, if_exists="replace", index=False ) @@ -2581,7 +2579,7 @@ def test_pg8000_sqlalchemy_passthrough_error(conn, request): # using driver that will not be installed on CI to trigger error # in sqlalchemy.create_engine -> test passing of this error to user db_uri = "postgresql+pg8000://user:pass@host/dbname" - table_uuid = table_uuid_gen("table") + table_uuid = create_unique_table_name("table") with pytest.raises(ImportError, match="pg8000"): sql.read_sql(f"select * from {table_uuid}", db_uri) @@ -2626,7 +2624,7 @@ def test_column_with_percentage(conn, request): request.applymarker(pytest.mark.xfail(reason="Not Implemented")) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_column_percentage") + table_uuid = create_unique_table_name("test_column_percentage") df = DataFrame({"A": [0, 1, 2], "%_variation": [3, 4, 5]}) df.to_sql(name=table_uuid, con=conn, index=False) @@ -2640,7 +2638,7 @@ def test_sql_open_close(test_frame3): # between the writing and reading (as in many real situations). 
with tm.ensure_clean() as name: - table_uuid = table_uuid_gen("test_frame3_legacy") + table_uuid = create_unique_table_name("test_frame3_legacy") with closing(sqlite3.connect(name)) as conn: assert sql.to_sql(test_frame3, table_uuid, conn, index=False) == 4 @@ -2720,7 +2718,7 @@ def test_create_table(conn, request): from sqlalchemy import inspect temp_frame = DataFrame({"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]}) - table_uuid = table_uuid_gen("temp_frame") + table_uuid = create_unique_table_name("temp_frame") with sql.SQLDatabase(conn, need_transaction=True) as pandasSQL: assert pandasSQL.to_sql(temp_frame, table_uuid) == 4 @@ -2742,7 +2740,7 @@ def test_drop_table(conn, request): from sqlalchemy import inspect temp_frame = DataFrame({"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]}) - table_uuid = table_uuid_gen("temp_frame") + table_uuid = create_unique_table_name("temp_frame") with sql.SQLDatabase(conn) as pandasSQL: with pandasSQL.run_transaction(): assert pandasSQL.to_sql(temp_frame, table_uuid) == 4 @@ -2766,7 +2764,7 @@ def test_roundtrip(conn, request, test_frame1): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame_roundtrip") + table_uuid = create_unique_table_name("test_frame_roundtrip") pandasSQL = pandasSQL_builder(conn) with pandasSQL.run_transaction(): assert pandasSQL.to_sql(test_frame1, table_uuid) == 4 @@ -2844,7 +2842,7 @@ def test_sqlalchemy_default_type_conversion(conn, request): def test_bigint(conn, request): # int64 should be converted to BigInteger, GH7433 conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_bigint") + table_uuid = create_unique_table_name("test_bigint") df = DataFrame(data={"i64": [2**62]}) assert df.to_sql(name=table_uuid, con=conn, index=False) == 1 result = sql.read_sql_table(table_uuid, conn) @@ -2875,7 +2873,7 @@ def test_datetime_with_timezone_query(conn, request, parse_dates): # to 
datetime64[ns,psycopg2.tz.FixedOffsetTimezone..], which is ok # but should be more natural, so coerce to datetime64[ns] for now conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("datetz") + table_uuid = create_unique_table_name("datetz") expected = create_and_load_postgres_datetz(conn) # GH11216 @@ -2887,7 +2885,7 @@ def test_datetime_with_timezone_query(conn, request, parse_dates): @pytest.mark.parametrize("conn", postgresql_connectable) def test_datetime_with_timezone_query_chunksize(conn, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("datetz") + table_uuid = create_unique_table_name("datetz") expected = create_and_load_postgres_datetz(conn) df = concat( @@ -2901,7 +2899,7 @@ def test_datetime_with_timezone_query_chunksize(conn, request): @pytest.mark.parametrize("conn", postgresql_connectable) def test_datetime_with_timezone_table(conn, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("datetz") + table_uuid = create_unique_table_name("datetz") expected = create_and_load_postgres_datetz(conn) result = sql.read_sql_table(table_uuid, conn) @@ -2913,7 +2911,7 @@ def test_datetime_with_timezone_table(conn, request): def test_datetime_with_timezone_roundtrip(conn, request): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_datetime_tz") + table_uuid = create_unique_table_name("test_datetime_tz") # GH 9086 # Write datetimetz data to a db and read it back # For dbs that support timestamps with timezones, should get back UTC @@ -2945,7 +2943,7 @@ def test_datetime_with_timezone_roundtrip(conn, request): def test_out_of_bounds_datetime(conn, request): # GH 26761 conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_datetime_obb") + table_uuid = create_unique_table_name("test_datetime_obb") data = DataFrame({"date": datetime(9999, 1, 1)}, index=[0]) assert data.to_sql(name=table_uuid, con=conn, index=False) == 1 result = 
sql.read_sql_table(table_uuid, conn) @@ -2960,7 +2958,7 @@ def test_naive_datetimeindex_roundtrip(conn, request): # GH 23510 # Ensure that a naive DatetimeIndex isn't converted to UTC conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("foo_table") + table_uuid = create_unique_table_name("foo_table") dates = date_range("2018-01-01", periods=5, freq="6h", unit="us")._with_freq(None) expected = DataFrame({"nums": range(5)}, index=dates) assert expected.to_sql(name=table_uuid, con=conn, index_label="info_date") == 5 @@ -3005,7 +3003,7 @@ def test_date_parsing(conn, request): def test_datetime(conn, request): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_datetime") + table_uuid = create_unique_table_name("test_datetime") df = DataFrame( {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)} ) @@ -3032,7 +3030,7 @@ def test_datetime(conn, request): def test_datetime_NaT(conn, request): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_datetime") + table_uuid = create_unique_table_name("test_datetime") df = DataFrame( {"A": date_range("2013-01-01 09:00:00", periods=3), "B": np.arange(3.0)} ) @@ -3058,7 +3056,7 @@ def test_datetime_NaT(conn, request): def test_datetime_date(conn, request): # test support for datetime.date conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_date") + table_uuid = create_unique_table_name("test_date") df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"]) assert df.to_sql(name=table_uuid, con=conn, index=False) == 2 res = read_sql_table(table_uuid, conn) @@ -3073,9 +3071,9 @@ def test_datetime_time(conn, request, sqlite_buildin): # test support for datetime.time conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_time") - table_uuid2 = table_uuid_gen("test_time2") - table_uuid3 = table_uuid_gen("test_time3") + table_uuid = 
create_unique_table_name("test_time") + table_uuid2 = create_unique_table_name("test_time2") + table_uuid3 = create_unique_table_name("test_time3") df = DataFrame([time(9, 0, 0), time(9, 1, 30)], columns=["a"]) assert df.to_sql(name=table_uuid, con=conn, index=False) == 2 res = read_sql_table(table_uuid, conn) @@ -3102,7 +3100,7 @@ def test_datetime_time(conn, request, sqlite_buildin): def test_mixed_dtype_insert(conn, request): # see GH6509 conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_read_write") + table_uuid = create_unique_table_name("test_read_write") s1 = Series(2**25 + 1, dtype=np.int32) s2 = Series(0.0, dtype=np.float32) df = DataFrame({"s1": s1, "s2": s2}) @@ -3118,7 +3116,7 @@ def test_mixed_dtype_insert(conn, request): def test_nan_numeric(conn, request): # NaNs in numeric float column conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_nan") + table_uuid = create_unique_table_name("test_nan") df = DataFrame({"A": [0, 1, 2], "B": [0.2, np.nan, 5.6]}) assert df.to_sql(name=table_uuid, con=conn, index=False) == 3 @@ -3135,7 +3133,7 @@ def test_nan_numeric(conn, request): def test_nan_fullcolumn(conn, request): # full NaN column (numeric float column) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_nan") + table_uuid = create_unique_table_name("test_nan") df = DataFrame({"A": [0, 1, 2], "B": [np.nan, np.nan, np.nan]}) assert df.to_sql(name=table_uuid, con=conn, index=False) == 3 @@ -3154,7 +3152,7 @@ def test_nan_fullcolumn(conn, request): def test_nan_string(conn, request): # NaNs in string column conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_nan") + table_uuid = create_unique_table_name("test_nan") df = DataFrame({"A": [0, 1, 2], "B": ["a", "b", np.nan]}) assert df.to_sql(name=table_uuid, con=conn, index=False) == 3 @@ -3184,7 +3182,7 @@ def test_to_sql_save_index(conn, request): [(1, 2.1, "line1"), (2, 1.5, "line2")], columns=["A", "B", "C"], index=["A"] 
) - tbl_name = table_uuid_gen("test_to_sql_saves_index") + tbl_name = create_unique_table_name("test_to_sql_saves_index") with pandasSQL_builder(conn) as pandasSQL: with pandasSQL.run_transaction(): assert pandasSQL.to_sql(df, tbl_name) == 2 @@ -3214,7 +3212,7 @@ def test_to_sql_save_index(conn, request): def test_transactions(conn, request): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_trans") + table_uuid = create_unique_table_name("test_trans") stmt = f"CREATE TABLE {table_uuid} (A INT, B TEXT)" if conn_name != "sqlite_buildin" and "adbc" not in conn_name: @@ -3231,7 +3229,7 @@ def test_transactions(conn, request): def test_transaction_rollback(conn, request): conn_name = conn conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_trans") + table_uuid = create_unique_table_name("test_trans") with pandasSQL_builder(conn) as pandasSQL: with pandasSQL.run_transaction() as trans: @@ -3286,7 +3284,7 @@ def test_get_schema_create_table(conn, request, test_frame3): from sqlalchemy import text from sqlalchemy.engine import Engine - tbl = table_uuid_gen("test_get_schema_create_table") + tbl = create_unique_table_name("test_get_schema_create_table") create_sql = sql.get_schema(test_frame3, tbl, con=conn) blank_test_df = test_frame3.iloc[:0] @@ -3318,11 +3316,11 @@ def test_dtype(conn, request): data = [(0.8, True), (0.9, None)] df = DataFrame(data, columns=cols) - table_uuid1 = table_uuid_gen("dtype_test") - table_uuid2 = table_uuid_gen("dtype_test2") - table_uuid3 = table_uuid_gen("dtype_test3") - table_uuid_single = table_uuid_gen("single_dtype_test") - error_table = table_uuid_gen("error") + table_uuid1 = create_unique_table_name("dtype_test") + table_uuid2 = create_unique_table_name("dtype_test2") + table_uuid3 = create_unique_table_name("dtype_test3") + table_uuid_single = create_unique_table_name("single_dtype_test") + error_table = create_unique_table_name("error") assert df.to_sql(name=table_uuid1, 
con=conn) == 2 assert df.to_sql(name=table_uuid2, con=conn, dtype={"B": TEXT}) == 2 @@ -3374,7 +3372,7 @@ def test_notna_dtype(conn, request): } df = DataFrame(cols) - tbl = table_uuid_gen("notna_dtype_test") + tbl = create_unique_table_name("notna_dtype_test") assert df.to_sql(name=tbl, con=conn) == 2 _ = sql.read_sql_table(tbl, conn) meta = MetaData() @@ -3413,7 +3411,7 @@ def test_double_precision(conn, request): } ) - table_uuid = table_uuid_gen("test_dtypes") + table_uuid = create_unique_table_name("test_dtypes") assert ( df.to_sql( name=table_uuid, @@ -3443,7 +3441,7 @@ def test_double_precision(conn, request): @pytest.mark.parametrize("conn", sqlalchemy_connectable) def test_connectable_issue_example(conn, request): conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_foo_data") + table_uuid = create_unique_table_name("test_foo_data") # This tests the example raised in issue # https://github.com/pandas-dev/pandas/issues/10104 @@ -3514,7 +3512,7 @@ def test_temporary_table(conn, request): pytest.skip("test does not work with str connection") conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("temp_test") + table_uuid = create_unique_table_name("temp_test") from sqlalchemy import ( Column, @@ -3557,7 +3555,7 @@ def test_invalid_engine(conn, request, test_frame1): ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame1") + table_uuid = create_unique_table_name("test_frame1") msg = "engine must be one of 'auto', 'sqlalchemy'" with pandasSQL_builder(conn) as pandasSQL: with pytest.raises(ValueError, match=msg): @@ -3569,7 +3567,7 @@ def test_to_sql_with_sql_engine(conn, request, test_frame1): """`to_sql` with the `engine` param""" # mostly copied from this class's `_to_sql()` method conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame1") + table_uuid = create_unique_table_name("test_frame1") with pandasSQL_builder(conn) as pandasSQL: with pandasSQL.run_transaction(): assert 
pandasSQL.to_sql(test_frame1, table_uuid, engine="auto") == 4 @@ -3584,7 +3582,7 @@ def test_to_sql_with_sql_engine(conn, request, test_frame1): def test_options_sqlalchemy(conn, request, test_frame1): # use the set option conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame1") + table_uuid = create_unique_table_name("test_frame1") with pd.option_context("io.sql.engine", "sqlalchemy"): with pandasSQL_builder(conn) as pandasSQL: with pandasSQL.run_transaction(): @@ -3600,7 +3598,7 @@ def test_options_sqlalchemy(conn, request, test_frame1): def test_options_auto(conn, request, test_frame1): # use the set option conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test_frame1") + table_uuid = create_unique_table_name("test_frame1") with pd.option_context("io.sql.engine", "auto"): with pandasSQL_builder(conn) as pandasSQL: with pandasSQL.run_transaction(): @@ -3646,7 +3644,7 @@ def test_read_sql_dtype_backend( # GH#50048 conn_name = conn conn = request.getfixturevalue(conn) - table = table_uuid_gen("test") + table = create_unique_table_name("test") df = dtype_backend_data df.to_sql(name=table, con=conn, index=False, if_exists="replace") @@ -3699,7 +3697,7 @@ def test_read_sql_dtype_backend_table( # GH#50048 conn_name = conn conn = request.getfixturevalue(conn) - table = table_uuid_gen("test") + table = create_unique_table_name("test") df = dtype_backend_data df.to_sql(name=table, con=conn, index=False, if_exists="replace") @@ -3728,7 +3726,7 @@ def test_read_sql_dtype_backend_table( @pytest.mark.parametrize("func", ["read_sql", "read_sql_table", "read_sql_query"]) def test_read_sql_invalid_dtype_backend_table(conn, request, func, dtype_backend_data): conn = request.getfixturevalue(conn) - table = table_uuid_gen("test") + table = create_unique_table_name("test") df = dtype_backend_data df.to_sql(name=table, con=conn, index=False, if_exists="replace") @@ -3809,7 +3807,7 @@ def test_chunksize_empty_dtypes(conn, request): 
pytest.mark.xfail(reason="chunksize argument NotImplemented with ADBC") ) conn = request.getfixturevalue(conn) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") dtypes = {"a": "int64", "b": "object"} df = DataFrame(columns=["a", "b"]).astype(dtypes) expected = df.copy() @@ -3830,7 +3828,7 @@ def test_chunksize_empty_dtypes(conn, request): def test_read_sql_dtype(conn, request, func, dtype_backend): # GH#50797 conn = request.getfixturevalue(conn) - table = table_uuid_gen("test") + table = create_unique_table_name("test") df = DataFrame({"a": [1, 2, 3], "b": 5}) df.to_sql(name=table, con=conn, index=False, if_exists="replace") @@ -3854,7 +3852,7 @@ def test_read_sql_dtype(conn, request, func, dtype_backend): def test_bigint_warning(sqlite_engine): conn = sqlite_engine - table_uuid = table_uuid_gen("test_bigintwarning") + table_uuid = create_unique_table_name("test_bigintwarning") # test no warning for BIGINT (to support int64) is raised (GH7433) df = DataFrame({"a": [1, 2]}, dtype="int64") assert df.to_sql(name=table_uuid, con=conn, index=False) == 2 @@ -3872,7 +3870,7 @@ def test_valueerror_exception(sqlite_engine): def test_row_object_is_named_tuple(sqlite_engine): conn = sqlite_engine - table_uuid = table_uuid_gen("test_frame") + table_uuid = create_unique_table_name("test_frame") # GH 40682 # Test for the is_named_tuple() function # Placed here due to its usage of sqlalchemy @@ -3912,7 +3910,7 @@ class Test(BaseModel): def test_read_sql_string_inference(sqlite_engine): conn = sqlite_engine # GH#54430 - table = table_uuid_gen("test") + table = create_unique_table_name("test") df = DataFrame({"a": ["x", "y"]}) df.to_sql(table, con=conn, index=False, if_exists="replace") @@ -3929,7 +3927,7 @@ def test_read_sql_string_inference(sqlite_engine): def test_roundtripping_datetimes(sqlite_engine): conn = sqlite_engine - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") # GH#54877 df = DataFrame({"t": 
[datetime(2020, 12, 31, 12)]}, dtype="datetime64[ns]") df.to_sql(table_uuid, conn, if_exists="replace", index=False) @@ -3949,7 +3947,7 @@ def sqlite_builtin_detect_types(): def test_roundtripping_datetimes_detect_types(sqlite_builtin_detect_types): # https://github.com/pandas-dev/pandas/issues/55554 conn = sqlite_builtin_detect_types - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") df = DataFrame({"t": [datetime(2020, 12, 31, 12)]}, dtype="datetime64[ns]") df.to_sql(table_uuid, conn, if_exists="replace", index=False) result = pd.read_sql(f"select * from {table_uuid}", conn).iloc[0, 0] @@ -3970,9 +3968,11 @@ def test_psycopg2_schema_support(postgresql_psycopg2_engine): con.exec_driver_sql("DROP SCHEMA IF EXISTS other CASCADE;") con.exec_driver_sql("CREATE SCHEMA other;") - schema_public_uuid = table_uuid_gen("test_schema_public") - schema_public_explicit_uuid = table_uuid_gen("test_schema_public_explicit") - schema_other_uuid = table_uuid_gen("test_schema_other") + schema_public_uuid = create_unique_table_name("test_schema_public") + schema_public_explicit_uuid = create_unique_table_name( + "test_schema_public_explicit" + ) + schema_other_uuid = create_unique_table_name("test_schema_other") # write dataframe to different schema's assert df.to_sql(name=schema_public_uuid, con=conn, index=False) == 2 @@ -4037,7 +4037,7 @@ def test_self_join_date_columns(postgresql_psycopg2_engine): conn = postgresql_psycopg2_engine from sqlalchemy.sql import text - tb = table_uuid_gen("person") + tb = create_unique_table_name("person") create_table = text( f""" @@ -4070,7 +4070,7 @@ def test_self_join_date_columns(postgresql_psycopg2_engine): def test_create_and_drop_table(sqlite_engine): conn = sqlite_engine - table_uuid = table_uuid_gen("drop_test_frame") + table_uuid = create_unique_table_name("drop_test_frame") temp_frame = DataFrame({"one": [1.0, 2.0, 3.0, 4.0], "two": [4.0, 3.0, 2.0, 1.0]}) with sql.SQLDatabase(conn) as pandasSQL: with
pandasSQL.run_transaction(): @@ -4086,7 +4086,7 @@ def test_create_and_drop_table(sqlite_engine): def test_sqlite_datetime_date(sqlite_buildin): conn = sqlite_buildin - table_uuid = table_uuid_gen("test_date") + table_uuid = create_unique_table_name("test_date") df = DataFrame([date(2014, 1, 1), date(2014, 1, 2)], columns=["a"]) assert df.to_sql(name=table_uuid, con=conn, index=False) == 2 res = read_sql_query(f"SELECT * FROM {table_uuid}", conn) @@ -4097,7 +4097,7 @@ def test_sqlite_datetime_date(sqlite_buildin): @pytest.mark.parametrize("tz_aware", [False, True]) def test_sqlite_datetime_time(tz_aware, sqlite_buildin): conn = sqlite_buildin - table_uuid = table_uuid_gen("test_time") + table_uuid = create_unique_table_name("test_time") # test support for datetime.time, GH #8341 if not tz_aware: tz_times = [time(9, 0, 0), time(9, 1, 30)] @@ -4124,10 +4124,10 @@ def get_sqlite_column_type(conn, table, column): def test_sqlite_test_dtype(sqlite_buildin): conn = sqlite_buildin - table_uuid = table_uuid_gen("dtype_test") - table_uuid2 = table_uuid_gen("dtype_test2") - table_error = table_uuid_gen("error") - table_single = table_uuid_gen("single_dtype_test") + table_uuid = create_unique_table_name("dtype_test") + table_uuid2 = create_unique_table_name("dtype_test2") + table_error = create_unique_table_name("error") + table_single = create_unique_table_name("single_dtype_test") cols = ["A", "B"] data = [(0.8, True), (0.9, None)] df = DataFrame(data, columns=cols) @@ -4158,7 +4158,7 @@ def test_sqlite_notna_dtype(sqlite_buildin): } df = DataFrame(cols) - tbl = table_uuid_gen("notna_dtype_test") + tbl = create_unique_table_name("notna_dtype_test") assert df.to_sql(name=tbl, con=conn) == 2 assert get_sqlite_column_type(conn, tbl, "Bool") == "INTEGER" @@ -4235,8 +4235,8 @@ def test_xsqlite_basic(sqlite_buildin): columns=Index(list("ABCD")), index=date_range("2000-01-01", periods=10, freq="B"), ) - table_uuid = table_uuid_gen("test_table") - table_uuid2 =
table_uuid_gen("test_table2") + table_uuid = create_unique_table_name("test_table") + table_uuid2 = create_unique_table_name("test_table2") assert sql.to_sql(frame, name=table_uuid, con=sqlite_buildin, index=False) == 10 result = sql.read_sql(f"select * from {table_uuid}", sqlite_buildin) @@ -4266,7 +4266,7 @@ def test_xsqlite_write_row_by_row(sqlite_buildin): columns=Index(list("ABCD")), index=date_range("2000-01-01", periods=10, freq="B"), ) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") frame.iloc[0, 0] = np.nan create_sql = sql.get_schema(frame, table_uuid) cur = sqlite_buildin.cursor() @@ -4290,7 +4290,7 @@ def test_xsqlite_execute(sqlite_buildin): columns=Index(list("ABCD")), index=date_range("2000-01-01", periods=10, freq="B"), ) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = sql.get_schema(frame, table_uuid) cur = sqlite_buildin.cursor() cur.execute(create_sql) @@ -4312,7 +4312,7 @@ def test_xsqlite_schema(sqlite_buildin): columns=Index(list("ABCD")), index=date_range("2000-01-01", periods=10, freq="B"), ) - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = sql.get_schema(frame, table_uuid) lines = create_sql.splitlines() for line in lines: @@ -4328,7 +4328,7 @@ def test_xsqlite_schema(sqlite_buildin): def test_xsqlite_execute_fail(sqlite_buildin): - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = f""" CREATE TABLE {table_uuid} ( @@ -4350,7 +4350,7 @@ def test_xsqlite_execute_fail(sqlite_buildin): def test_xsqlite_execute_closed_connection(): - table_uuid = table_uuid_gen("test") + table_uuid = create_unique_table_name("test") create_sql = f""" CREATE TABLE {table_uuid} ( @@ -4373,7 +4373,7 @@ def test_xsqlite_execute_closed_connection(): def test_xsqlite_keyword_as_column_names(sqlite_buildin): - table_uuid = table_uuid_gen("testkeywords") + table_uuid =
create_unique_table_name("testkeywords") df = DataFrame({"From": np.ones(5)}) assert sql.to_sql(df, con=sqlite_buildin, name=table_uuid, index=False) == 5 @@ -4381,7 +4381,7 @@ def test_xsqlite_keyword_as_column_names(sqlite_buildin): def test_xsqlite_onecolumn_of_integer(sqlite_buildin): # GH 3628 # a column_of_integers dataframe should transfer well to sql - table_uuid = table_uuid_gen("mono_df") + table_uuid = create_unique_table_name("mono_df") mono_df = DataFrame([1, 2], columns=["c0"]) assert sql.to_sql(mono_df, con=sqlite_buildin, name=table_uuid, index=False) == 2 # computing the sum via sql @@ -4397,7 +4397,7 @@ def test_xsqlite_onecolumn_of_integer(sqlite_buildin): def test_xsqlite_if_exists(sqlite_buildin): df_if_exists_1 = DataFrame({"col1": [1, 2], "col2": ["A", "B"]}) df_if_exists_2 = DataFrame({"col1": [3, 4, 5], "col2": ["C", "D", "E"]}) - table_name = table_uuid_gen("table_if_exists") + table_name = create_unique_table_name("table_if_exists") sql_select = f"SELECT * FROM {table_name}" msg = "'notvalidvalue' is not valid for if_exists"