diff --git a/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala b/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala
index 42cc96bf5..14a664108 100644
--- a/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala
+++ b/spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala
@@ -142,7 +142,10 @@ case class CometScanExec(
   override lazy val metadata: Map[String, String] =
     if (wrapped == null) Map.empty else wrapped.metadata

-  override def verboseStringWithOperatorId(): String = wrapped.verboseStringWithOperatorId()
+  override def verboseStringWithOperatorId(): String = {
+    getTagValue(QueryPlan.OP_ID_TAG).foreach(id => wrapped.setTagValue(QueryPlan.OP_ID_TAG, id))
+    wrapped.verboseStringWithOperatorId()
+  }

   lazy val inputRDD: RDD[InternalRow] = {
     val options = relation.options +
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q1/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q1/explain.txt
index b0ea6bed8..59efef88e 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q1/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q1/explain.txt
@@ -41,7 +41,7 @@ TakeOrderedAndProject (40)
    +- CometScan parquet spark_catalog.default.customer (34)


-(unknown) Scan parquet spark_catalog.default.store_returns
+(1) Scan parquet spark_catalog.default.store_returns
 Output [4]: [sr_customer_sk#1, sr_store_sk#2, sr_return_amt#3, sr_returned_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -91,7 +91,7 @@ Results [3]: [sr_customer_sk#1 AS ctr_customer_sk#10, sr_store_sk#2 AS ctr_store
 Input [3]: [ctr_customer_sk#10, ctr_store_sk#11, ctr_total_return#12]
 Condition : isnotnull(ctr_total_return#12)

-(unknown) Scan parquet spark_catalog.default.store_returns
+(11) Scan parquet spark_catalog.default.store_returns
 Output [4]: [sr_customer_sk#1, sr_store_sk#2, sr_return_amt#3, sr_returned_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -173,7 +173,7 @@ Join condition: (cast(ctr_total_return#12 as decimal(24,7)) > (avg(ctr_total_ret
 Output [2]: [ctr_customer_sk#10, ctr_store_sk#11]
 Input [5]: [ctr_customer_sk#10, ctr_store_sk#11, ctr_total_return#12, (avg(ctr_total_return) * 1.2)#21, ctr_store_sk#11#22]

-(unknown) Scan parquet spark_catalog.default.store
+(27) Scan parquet spark_catalog.default.store
 Output [2]: [s_store_sk#23, s_state#24]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store]
@@ -205,7 +205,7 @@ Join condition: None
 Output [1]: [ctr_customer_sk#10]
 Input [3]: [ctr_customer_sk#10, ctr_store_sk#11, s_store_sk#23]

-(unknown) Scan parquet spark_catalog.default.customer
+(34) Scan parquet spark_catalog.default.customer
 Output [2]: [c_customer_sk#25, c_customer_id#26]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -247,7 +247,7 @@ BroadcastExchange (45)
    +- CometScan parquet spark_catalog.default.date_dim (41)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(41) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#6, d_year#27]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q10/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q10/explain.txt
index 1ea234408..15490b87d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q10/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q10/explain.txt
@@ -44,7 +44,7 @@ TakeOrderedAndProject (43)
    +- CometScan parquet spark_catalog.default.customer_demographics (34)


-(unknown) Scan parquet spark_catalog.default.customer
+(1) Scan parquet spark_catalog.default.customer
 Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -58,7 +58,7 @@ Condition : (isnotnull(c_current_addr_sk#5) AND isnotnull(c_current_cdemo_sk#4))
 (3) ColumnarToRow [codegen id : 9]
 Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(4) Scan parquet spark_catalog.default.store_sales
 Output [2]: [ss_customer_sk#6, ss_sold_date_sk#7]
 Batched: true
 Location: InMemoryFileIndex []
@@ -91,7 +91,7 @@ Right keys [1]: [ss_customer_sk#6]
 Join type: LeftSemi
 Join condition: None

-(unknown) Scan parquet spark_catalog.default.web_sales
+(11) Scan parquet spark_catalog.default.web_sales
 Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11]
 Batched: true
 Location: InMemoryFileIndex []
@@ -124,7 +124,7 @@ Right keys [1]: [ws_bill_customer_sk#10]
 Join type: ExistenceJoin(exists#2)
 Join condition: None

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(18) Scan parquet spark_catalog.default.catalog_sales
 Output [2]: [cs_ship_customer_sk#14, cs_sold_date_sk#15]
 Batched: true
 Location: InMemoryFileIndex []
@@ -165,7 +165,7 @@ Condition : (exists#2 OR exists#1)
 Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5]
 Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(27) Scan parquet spark_catalog.default.customer_address
 Output [2]: [ca_address_sk#18, ca_county#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -197,7 +197,7 @@ Join condition: None
 Output [1]: [c_current_cdemo_sk#4]
 Input [3]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#18]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(34) Scan parquet spark_catalog.default.customer_demographics
 Output [9]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_education_status#23, cd_purchase_estimate#24, cd_credit_rating#25, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -257,7 +257,7 @@ BroadcastExchange (48)
    +- CometScan parquet spark_catalog.default.date_dim (44)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(44) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#9, d_year#38, d_moy#39]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt
index 64f486f71..c663d4688 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q11/explain.txt
@@ -73,7 +73,7 @@ TakeOrderedAndProject (72)
    +- ReusedExchange (63)


-(unknown) Scan parquet spark_catalog.default.customer
+(1) Scan parquet spark_catalog.default.customer
 Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -87,7 +87,7 @@ Condition : (isnotnull(c_customer_sk#1) AND isnotnull(c_customer_id#2))
 (3) ColumnarToRow [codegen id : 3]
 Input [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(4) Scan parquet spark_catalog.default.store_sales
 Output [4]: [ss_customer_sk#9, ss_ext_discount_amt#10, ss_ext_list_price#11, ss_sold_date_sk#12]
 Batched: true
 Location: InMemoryFileIndex []
@@ -151,7 +151,7 @@ Results [2]: [c_customer_id#2 AS customer_id#19, MakeDecimal(sum(UnscaledValue((
 Input [2]: [customer_id#19, year_total#20]
 Condition : (isnotnull(year_total#20) AND (year_total#20 > 0.00))

-(unknown) Scan parquet spark_catalog.default.customer
+(17) Scan parquet spark_catalog.default.customer
 Output [8]: [c_customer_sk#21, c_customer_id#22, c_first_name#23, c_last_name#24, c_preferred_cust_flag#25, c_birth_country#26, c_login#27, c_email_address#28]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -165,7 +165,7 @@ Condition : (isnotnull(c_customer_sk#21) AND isnotnull(c_customer_id#22))
 (19) ColumnarToRow [codegen id : 6]
 Input [8]: [c_customer_sk#21, c_customer_id#22, c_first_name#23, c_last_name#24, c_preferred_cust_flag#25, c_birth_country#26, c_login#27, c_email_address#28]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(20) Scan parquet spark_catalog.default.store_sales
 Output [4]: [ss_customer_sk#29, ss_ext_discount_amt#30, ss_ext_list_price#31, ss_sold_date_sk#32]
 Batched: true
 Location: InMemoryFileIndex []
@@ -239,7 +239,7 @@ Join condition: None
 Output [4]: [customer_id#19, year_total#20, customer_preferred_cust_flag#39, year_total#40]
 Input [5]: [customer_id#19, year_total#20, customer_id#38, customer_preferred_cust_flag#39, year_total#40]

-(unknown) Scan parquet spark_catalog.default.customer
+(35) Scan parquet spark_catalog.default.customer
 Output [8]: [c_customer_sk#41, c_customer_id#42, c_first_name#43, c_last_name#44, c_preferred_cust_flag#45, c_birth_country#46, c_login#47, c_email_address#48]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -253,7 +253,7 @@ Condition : (isnotnull(c_customer_sk#41) AND isnotnull(c_customer_id#42))
 (37) ColumnarToRow [codegen id : 10]
 Input [8]: [c_customer_sk#41, c_customer_id#42, c_first_name#43, c_last_name#44, c_preferred_cust_flag#45, c_birth_country#46, c_login#47, c_email_address#48]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(38) Scan parquet spark_catalog.default.web_sales
 Output [4]: [ws_bill_customer_sk#49, ws_ext_discount_amt#50, ws_ext_list_price#51, ws_sold_date_sk#52]
 Batched: true
 Location: InMemoryFileIndex []
@@ -331,7 +331,7 @@ Join condition: None
 Output [5]: [customer_id#19, year_total#20, customer_preferred_cust_flag#39, year_total#40, year_total#60]
 Input [6]: [customer_id#19, year_total#20, customer_preferred_cust_flag#39, year_total#40, customer_id#59, year_total#60]

-(unknown) Scan parquet spark_catalog.default.customer
+(54) Scan parquet spark_catalog.default.customer
 Output [8]: [c_customer_sk#61, c_customer_id#62, c_first_name#63, c_last_name#64, c_preferred_cust_flag#65, c_birth_country#66, c_login#67, c_email_address#68]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -345,7 +345,7 @@ Condition : (isnotnull(c_customer_sk#61) AND isnotnull(c_customer_id#62))
 (56) ColumnarToRow [codegen id : 14]
 Input [8]: [c_customer_sk#61, c_customer_id#62, c_first_name#63, c_last_name#64, c_preferred_cust_flag#65, c_birth_country#66, c_login#67, c_email_address#68]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(57) Scan parquet spark_catalog.default.web_sales
 Output [4]: [ws_bill_customer_sk#69, ws_ext_discount_amt#70, ws_ext_list_price#71, ws_sold_date_sk#72]
 Batched: true
 Location: InMemoryFileIndex []
@@ -432,7 +432,7 @@ BroadcastExchange (76)
    +- CometScan parquet spark_catalog.default.date_dim (73)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(73) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#14, d_year#15]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -457,7 +457,7 @@ BroadcastExchange (80)
    +- CometScan parquet spark_catalog.default.date_dim (77)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(77) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#34, d_year#35]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt
index 23ff8f948..6cf7f4b08 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q12/explain.txt
@@ -21,7 +21,7 @@ TakeOrderedAndProject (20)
    +- ReusedExchange (10)


-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
 Output [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -36,7 +36,7 @@ Condition : isnotnull(ws_item_sk#1)
 (3) ColumnarToRow [codegen id : 3]
 Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
 Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -125,7 +125,7 @@ BroadcastExchange (25)
    +- CometScan parquet spark_catalog.default.date_dim (21)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(21) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#11, d_date#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt
index 759871556..a647b1f05 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q13/explain.txt
@@ -35,7 +35,7 @@
    +- CometScan parquet spark_catalog.default.household_demographics (26)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [10]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10]
 Batched: true
 Location: InMemoryFileIndex []
@@ -50,7 +50,7 @@ Condition : (((((isnotnull(ss_store_sk#4) AND isnotnull(ss_addr_sk#3)) AND isnot
 (3) ColumnarToRow [codegen id : 6]
 Input [10]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10]

-(unknown) Scan parquet spark_catalog.default.store
+(4) Scan parquet spark_catalog.default.store
 Output [1]: [s_store_sk#12]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store]
@@ -78,7 +78,7 @@ Join condition: None
 Output [9]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10]
 Input [11]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_net_profit#9, ss_sold_date_sk#10, s_store_sk#12]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(10) Scan parquet spark_catalog.default.customer_address
 Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -123,7 +123,7 @@ Join condition: None
 Output [6]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8]
 Input [8]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, ss_sold_date_sk#10, d_date_sk#16]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(20) Scan parquet spark_catalog.default.customer_demographics
 Output [3]: [cd_demo_sk#17, cd_marital_status#18, cd_education_status#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -151,7 +151,7 @@ Join condition: ((((((cd_marital_status#18 = M) AND (cd_education_status#19 = Ad
 Output [7]: [ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_marital_status#18, cd_education_status#19]
 Input [9]: [ss_cdemo_sk#1, ss_hdemo_sk#2, ss_quantity#5, ss_sales_price#6, ss_ext_sales_price#7, ss_ext_wholesale_cost#8, cd_demo_sk#17, cd_marital_status#18, cd_education_status#19]

-(unknown) Scan parquet spark_catalog.default.household_demographics
+(26) Scan parquet spark_catalog.default.household_demographics
 Output [2]: [hd_demo_sk#20, hd_dep_count#21]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/household_demographics]
@@ -207,7 +207,7 @@ BroadcastExchange (39)
    +- CometScan parquet spark_catalog.default.date_dim (35)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(35) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#16, d_year#44]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt
index 1323cb8df..627a77a88 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14a/explain.txt
@@ -106,7 +106,7 @@ TakeOrderedAndProject (105)
    +- ReusedExchange (92)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -121,7 +121,7 @@ Condition : isnotnull(ss_item_sk#1)
 (3) ColumnarToRow [codegen id : 25]
 Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -135,7 +135,7 @@ Condition : ((isnotnull(i_brand_id#7) AND isnotnull(i_class_id#8)) AND isnotnull
 (6) ColumnarToRow [codegen id : 11]
 Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(7) Scan parquet spark_catalog.default.store_sales
 Output [2]: [ss_item_sk#10, ss_sold_date_sk#11]
 Batched: true
 Location: InMemoryFileIndex []
@@ -150,7 +150,7 @@ Condition : isnotnull(ss_item_sk#10)
 (9) ColumnarToRow [codegen id : 6]
 Input [2]: [ss_item_sk#10, ss_sold_date_sk#11]

-(unknown) Scan parquet spark_catalog.default.item
+(10) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -164,7 +164,7 @@ Condition : (((isnotnull(i_item_sk#13) AND isnotnull(i_brand_id#14)) AND isnotnu
 (12) ColumnarToRow [codegen id : 4]
 Input [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(13) Scan parquet spark_catalog.default.catalog_sales
 Output [2]: [cs_item_sk#17, cs_sold_date_sk#18]
 Batched: true
 Location: InMemoryFileIndex []
@@ -179,7 +179,7 @@ Condition : isnotnull(cs_item_sk#17)
 (15) ColumnarToRow [codegen id : 3]
 Input [2]: [cs_item_sk#17, cs_sold_date_sk#18]

-(unknown) Scan parquet spark_catalog.default.item
+(16) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -275,7 +275,7 @@ Functions: []
 Aggregate Attributes: []
 Results [3]: [brand_id#26, class_id#27, category_id#28]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(36) Scan parquet spark_catalog.default.web_sales
 Output [2]: [ws_item_sk#29, ws_sold_date_sk#30]
 Batched: true
 Location: InMemoryFileIndex []
@@ -350,7 +350,7 @@ Right keys [1]: [ss_item_sk#37]
 Join type: LeftSemi
 Join condition: None

-(unknown) Scan parquet spark_catalog.default.item
+(52) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#38, i_brand_id#39, i_class_id#40, i_category_id#41]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -426,7 +426,7 @@ Condition : (isnotnull(sales#51) AND (cast(sales#51 as decimal(32,6)) > cast(Sub
 Output [6]: [sales#51, number_sales#52, store AS channel#55, i_brand_id#39 AS i_brand_id#56, i_class_id#40 AS i_class_id#57, i_category_id#41 AS i_category_id#58]
 Input [5]: [i_brand_id#39, i_class_id#40, i_category_id#41, sales#51, number_sales#52]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(68) Scan parquet spark_catalog.default.catalog_sales
 Output [4]: [cs_item_sk#59, cs_quantity#60, cs_list_price#61, cs_sold_date_sk#62]
 Batched: true
 Location: InMemoryFileIndex []
@@ -502,7 +502,7 @@ Condition : (isnotnull(sales#77) AND (cast(sales#77 as decimal(32,6)) > cast(Reu
 Output [6]: [sales#77, number_sales#78, catalog AS channel#79, i_brand_id#65, i_class_id#66, i_category_id#67]
 Input [5]: [i_brand_id#65, i_class_id#66, i_category_id#67, sales#77, number_sales#78]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(84) Scan parquet spark_catalog.default.web_sales
 Output [4]: [ws_item_sk#80, ws_quantity#81, ws_list_price#82, ws_sold_date_sk#83]
 Batched: true
 Location: InMemoryFileIndex []
@@ -630,7 +630,7 @@ Subquery:1 Hosting operator id = 66 Hosting Expression = Subquery scalar-subquer
    +- ReusedExchange (118)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(106) Scan parquet spark_catalog.default.store_sales
 Output [3]: [ss_quantity#116, ss_list_price#117, ss_sold_date_sk#118]
 Batched: true
 Location: InMemoryFileIndex []
@@ -653,7 +653,7 @@ Join condition: None
 Output [2]: [ss_quantity#116 AS quantity#121, ss_list_price#117 AS list_price#122]
 Input [4]: [ss_quantity#116, ss_list_price#117, ss_sold_date_sk#118, d_date_sk#120]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(111) Scan parquet spark_catalog.default.catalog_sales
 Output [3]: [cs_quantity#123, cs_list_price#124, cs_sold_date_sk#125]
 Batched: true
 Location: InMemoryFileIndex []
@@ -676,7 +676,7 @@ Join condition: None
 Output [2]: [cs_quantity#123 AS quantity#128, cs_list_price#124 AS list_price#129]
 Input [4]: [cs_quantity#123, cs_list_price#124, cs_sold_date_sk#125, d_date_sk#127]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(116) Scan parquet spark_catalog.default.web_sales
 Output [3]: [ws_quantity#130, ws_list_price#131, ws_sold_date_sk#132]
 Batched: true
 Location: InMemoryFileIndex []
@@ -733,7 +733,7 @@ BroadcastExchange (129)
    +- CometScan parquet spark_catalog.default.date_dim (125)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(125) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#42, d_year#143, d_moy#144]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -763,7 +763,7 @@ BroadcastExchange (134)
    +- CometScan parquet spark_catalog.default.date_dim (130)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(130) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#25, d_year#145]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt
index 536306de5..7bba251de 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q14b/explain.txt
@@ -85,7 +85,7 @@ TakeOrderedAndProject (84)
    +- ReusedExchange (75)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -100,7 +100,7 @@ Condition : isnotnull(ss_item_sk#1)
 (3) ColumnarToRow [codegen id : 25]
 Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -114,7 +114,7 @@ Condition : ((isnotnull(i_brand_id#7) AND isnotnull(i_class_id#8)) AND isnotnull
 (6) ColumnarToRow [codegen id : 11]
 Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(7) Scan parquet spark_catalog.default.store_sales
 Output [2]: [ss_item_sk#10, ss_sold_date_sk#11]
 Batched: true
 Location: InMemoryFileIndex []
@@ -129,7 +129,7 @@ Condition : isnotnull(ss_item_sk#10)
 (9) ColumnarToRow [codegen id : 6]
 Input [2]: [ss_item_sk#10, ss_sold_date_sk#11]

-(unknown) Scan parquet spark_catalog.default.item
+(10) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -143,7 +143,7 @@ Condition : (((isnotnull(i_item_sk#13) AND isnotnull(i_brand_id#14)) AND isnotnu
 (12) ColumnarToRow [codegen id : 4]
 Input [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(13) Scan parquet spark_catalog.default.catalog_sales
 Output [2]: [cs_item_sk#17, cs_sold_date_sk#18]
 Batched: true
 Location: InMemoryFileIndex []
@@ -158,7 +158,7 @@ Condition : isnotnull(cs_item_sk#17)
 (15) ColumnarToRow [codegen id : 3]
 Input [2]: [cs_item_sk#17, cs_sold_date_sk#18]

-(unknown) Scan parquet spark_catalog.default.item
+(16) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -254,7 +254,7 @@ Functions: []
 Aggregate Attributes: []
 Results [3]: [brand_id#26, class_id#27, category_id#28]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(36) Scan parquet spark_catalog.default.web_sales
 Output [2]: [ws_item_sk#29, ws_sold_date_sk#30]
 Batched: true
 Location: InMemoryFileIndex []
@@ -329,7 +329,7 @@ Right keys [1]: [ss_item_sk#37]
 Join type: LeftSemi
 Join condition: None

-(unknown) Scan parquet spark_catalog.default.item
+(52) Scan parquet spark_catalog.default.item
 Output [4]: [i_item_sk#38, i_brand_id#39, i_class_id#40, i_category_id#41]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -401,7 +401,7 @@ Results [6]: [store AS channel#51, i_brand_id#39, i_class_id#40, i_category_id#4
 Input [6]: [channel#51, i_brand_id#39, i_class_id#40, i_category_id#41, sales#52, number_sales#53]
 Condition : (isnotnull(sales#52) AND (cast(sales#52 as decimal(32,6)) > cast(Subquery scalar-subquery#54, [id=#55] as decimal(32,6))))

-(unknown) Scan parquet spark_catalog.default.store_sales
+(67) Scan parquet spark_catalog.default.store_sales
 Output [4]: [ss_item_sk#56, ss_quantity#57, ss_list_price#58, ss_sold_date_sk#59]
 Batched: true
 Location: InMemoryFileIndex []
@@ -511,7 +511,7 @@ Subquery:1 Hosting operator id = 66 Hosting Expression = Subquery scalar-subquer
    +- ReusedExchange (97)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(85) Scan parquet spark_catalog.default.store_sales
 Output [3]: [ss_quantity#77, ss_list_price#78, ss_sold_date_sk#79]
 Batched: true
 Location: InMemoryFileIndex []
@@ -534,7 +534,7 @@ Join condition: None
 Output [2]: [ss_quantity#77 AS quantity#82, ss_list_price#78 AS list_price#83]
 Input [4]: [ss_quantity#77, ss_list_price#78, ss_sold_date_sk#79, d_date_sk#81]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(90) Scan parquet spark_catalog.default.catalog_sales
 Output [3]: [cs_quantity#84, cs_list_price#85, cs_sold_date_sk#86]
 Batched: true
 Location: InMemoryFileIndex []
@@ -557,7 +557,7 @@ Join condition: None
 Output [2]: [cs_quantity#84 AS quantity#89, cs_list_price#85 AS list_price#90]
 Input [4]: [cs_quantity#84, cs_list_price#85, cs_sold_date_sk#86, d_date_sk#88]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(95) Scan parquet spark_catalog.default.web_sales
 Output [3]: [ws_quantity#91, ws_list_price#92, ws_sold_date_sk#93]
 Batched: true
 Location: InMemoryFileIndex []
@@ -614,7 +614,7 @@ BroadcastExchange (108)
    +- CometScan parquet spark_catalog.default.date_dim (104)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(104) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#42, d_week_seq#104]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -643,7 +643,7 @@ Subquery:6 Hosting operator id = 105 Hosting Expression = Subquery scalar-subque
    +- CometScan parquet spark_catalog.default.date_dim (109)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(109) Scan parquet spark_catalog.default.date_dim
 Output [4]: [d_week_seq#107, d_year#108, d_moy#109, d_dom#110]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -669,7 +669,7 @@ BroadcastExchange (117)
    +- CometScan parquet spark_catalog.default.date_dim (113)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(113) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#25, d_year#111]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -705,7 +705,7 @@ BroadcastExchange (122)
    +- CometScan parquet spark_catalog.default.date_dim (118)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(118) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#65, d_week_seq#112]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -734,7 +734,7 @@ Subquery:12 Hosting operator id = 119 Hosting Expression = Subquery scalar-subqu
    +- CometScan parquet spark_catalog.default.date_dim (123)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(123) Scan parquet spark_catalog.default.date_dim
 Output [4]: [d_week_seq#115, d_year#116, d_moy#117, d_dom#118]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt
index 811a9eef5..13a621f77 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q15/explain.txt
@@ -23,7 +23,7 @@ TakeOrderedAndProject (22)
    +- ReusedExchange (16)


-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(1) Scan parquet spark_catalog.default.catalog_sales
 Output [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -38,7 +38,7 @@ Condition : isnotnull(cs_bill_customer_sk#1)
 (3) ColumnarToRow [codegen id : 4]
 Input [3]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3]

-(unknown) Scan parquet spark_catalog.default.customer
+(4) Scan parquet spark_catalog.default.customer
 Output [2]: [c_customer_sk#5, c_current_addr_sk#6]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -66,7 +66,7 @@ Join condition: None
 Output [3]: [cs_sales_price#2, cs_sold_date_sk#3, c_current_addr_sk#6]
 Input [5]: [cs_bill_customer_sk#1, cs_sales_price#2, cs_sold_date_sk#3, c_customer_sk#5, c_current_addr_sk#6]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(10) Scan parquet spark_catalog.default.customer_address
 Output [3]: [ca_address_sk#7, ca_state#8, ca_zip#9]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -139,7 +139,7 @@ BroadcastExchange (27)
    +- CometScan parquet spark_catalog.default.date_dim (23)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(23) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#10, d_year#15, d_qoy#16]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt
index d8ed9c06e..ccec341ad 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q16/explain.txt
@@ -46,7 +46,7 @@
    +- CometScan parquet spark_catalog.default.call_center (34)


-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(1) Scan parquet spark_catalog.default.catalog_sales
 Output [8]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, cs_sold_date_sk#8]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_sales]
@@ -72,7 +72,7 @@ Arguments: hashpartitioning(cs_order_number#5, 5), ENSURE_REQUIREMENTS, [plan_id
 Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7]
 Arguments: [cs_order_number#5 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(7) Scan parquet spark_catalog.default.catalog_sales
 Output [3]: [cs_warehouse_sk#9, cs_order_number#10, cs_sold_date_sk#11]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_sales]
@@ -103,7 +103,7 @@ Join condition: NOT (cs_warehouse_sk#4 = cs_warehouse_sk#9)
 Output [6]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7]
 Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_warehouse_sk#4, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7]

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(14) Scan parquet spark_catalog.default.catalog_returns
 Output [2]: [cr_order_number#12, cr_returned_date_sk#13]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -130,7 +130,7 @@ Right keys [1]: [cr_order_number#12]
 Join type: LeftAnti
 Join condition: None

-(unknown) Scan parquet spark_catalog.default.date_dim
+(20) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#14, d_date#15]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -162,7 +162,7 @@ Join condition: None
 Output [5]: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7]
 Input [7]: [cs_ship_date_sk#1, cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, d_date_sk#14]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(27) Scan parquet spark_catalog.default.customer_address
 Output [2]: [ca_address_sk#16, ca_state#17]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -194,7 +194,7 @@ Join condition: None
 Output [4]: [cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7]
 Input [6]: [cs_ship_addr_sk#2, cs_call_center_sk#3, cs_order_number#5, cs_ext_ship_cost#6, cs_net_profit#7, ca_address_sk#16]

-(unknown) Scan parquet spark_catalog.default.call_center
+(34) Scan parquet spark_catalog.default.call_center
 Output [2]: [cc_call_center_sk#18, cc_county#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/call_center]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt
index 5e9b6d1da..0de98cfb0 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q17/explain.txt
@@ -41,7 +41,7 @@ TakeOrderedAndProject (40)
    +- CometScan parquet spark_catalog.default.item (31)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_quantity#5, ss_sold_date_sk#6]
 Batched: true
 Location: InMemoryFileIndex []
@@ -56,7 +56,7 @@ Condition : (((isnotnull(ss_customer_sk#2) AND isnotnull(ss_item_sk#1)) AND isno
 (3) ColumnarToRow [codegen id : 8]
 Input [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_quantity#5, ss_sold_date_sk#6]

-(unknown) Scan parquet spark_catalog.default.store_returns
+(4) Scan parquet spark_catalog.default.store_returns
 Output [5]: [sr_item_sk#8, sr_customer_sk#9, sr_ticket_number#10, sr_return_quantity#11, sr_returned_date_sk#12]
 Batched: true
 Location: InMemoryFileIndex []
@@ -85,7 +85,7 @@ Join condition: None
 Output [8]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#5, ss_sold_date_sk#6, sr_item_sk#8, sr_customer_sk#9, sr_return_quantity#11, sr_returned_date_sk#12]
 Input [11]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_quantity#5, ss_sold_date_sk#6, sr_item_sk#8, sr_customer_sk#9, sr_ticket_number#10, sr_return_quantity#11, sr_returned_date_sk#12]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(10) Scan parquet spark_catalog.default.catalog_sales
 Output [4]: [cs_bill_customer_sk#14, cs_item_sk#15, cs_quantity#16, cs_sold_date_sk#17]
 Batched: true
 Location: InMemoryFileIndex []
@@ -153,7 +153,7 @@ Join condition: None
 Output [5]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#5, sr_return_quantity#11, cs_quantity#16]
 Input [7]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#5, sr_return_quantity#11, cs_quantity#16, cs_sold_date_sk#17, d_date_sk#21]

-(unknown) Scan parquet spark_catalog.default.store
+(25) Scan parquet spark_catalog.default.store
 Output [2]: [s_store_sk#22, s_state#23]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store]
@@ -181,7 +181,7 @@ Join condition: None
 Output [5]: [ss_item_sk#1, ss_quantity#5, sr_return_quantity#11, cs_quantity#16, s_state#23]
 Input [7]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#5, sr_return_quantity#11, cs_quantity#16, s_store_sk#22, s_state#23]

-(unknown) Scan parquet spark_catalog.default.item
+(31) Scan parquet spark_catalog.default.item
 Output [3]: [i_item_sk#24, i_item_id#25, i_item_desc#26]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -241,7 +241,7 @@ BroadcastExchange (45)
    +- CometScan parquet spark_catalog.default.date_dim (41)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(41) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#19, d_quarter_name#84]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -271,7 +271,7 @@ BroadcastExchange (50)
    +- CometScan parquet spark_catalog.default.date_dim (46)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(46) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#20, d_quarter_name#85]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q18/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q18/explain.txt
index 613377a61..1e9c660c5 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q18/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q18/explain.txt
@@ -44,7 +44,7 @@ TakeOrderedAndProject (43)
    +- CometScan parquet spark_catalog.default.item (33)


-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(1) Scan parquet spark_catalog.default.catalog_sales
 Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9]
 Batched: true
 Location: InMemoryFileIndex []
@@ -59,7 +59,7 @@ Condition : ((isnotnull(cs_bill_cdemo_sk#2) AND isnotnull(cs_bill_customer_sk#1)
 (3) ColumnarToRow [codegen id : 7]
 Input [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(4) Scan parquet spark_catalog.default.customer_demographics
 Output [4]: [cd_demo_sk#11, cd_gender#12, cd_education_status#13, cd_dep_count#14]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -91,7 +91,7 @@ Join condition: None
 Output [9]: [cs_bill_customer_sk#1, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14]
 Input [11]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_demo_sk#11, cd_dep_count#14]

-(unknown) Scan parquet spark_catalog.default.customer
+(11) Scan parquet spark_catalog.default.customer
 Output [5]: [c_customer_sk#15, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_month#18, c_birth_year#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -123,7 +123,7 @@ Join condition: None
 Output [11]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19]
 Input [13]: [cs_bill_customer_sk#1, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_customer_sk#15, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(18) Scan parquet spark_catalog.default.customer_demographics
 Output [1]: [cd_demo_sk#20]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -151,7 +151,7 @@ Join condition: None
 Output [10]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_addr_sk#17, c_birth_year#19]
 Input [12]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19, cd_demo_sk#20]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(24) Scan parquet spark_catalog.default.customer_address
 Output [4]: [ca_address_sk#21, ca_county#22, ca_state#23, ca_country#24]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -192,7 +192,7 @@ Join condition: None
 Output [11]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cd_dep_count#14, c_birth_year#19, ca_county#22, ca_state#23, ca_country#24]
 Input [13]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_birth_year#19, ca_county#22, ca_state#23, ca_country#24, d_date_sk#25]

-(unknown) Scan parquet spark_catalog.default.item
+(33) Scan parquet spark_catalog.default.item
 Output [2]: [i_item_sk#26, i_item_id#27]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -256,7 +256,7 @@ BroadcastExchange (48)
    +- CometScan parquet spark_catalog.default.date_dim (44)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(44) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#25, d_year#75]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q19/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q19/explain.txt
index 1150b3d66..999fec838 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q19/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q19/explain.txt
@@ -40,7 +40,7 @@ TakeOrderedAndProject (39)
    +- CometScan parquet spark_catalog.default.store (30)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(1) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#1, d_year#2, d_moy#3]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -58,7 +58,7 @@ Arguments: [d_date_sk#1], [d_date_sk#1]
 (4) ColumnarToRow [codegen id : 6]
 Input [1]: [d_date_sk#1]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(5) Scan parquet spark_catalog.default.store_sales
 Output [5]: [ss_item_sk#4, ss_customer_sk#5, ss_store_sk#6, ss_ext_sales_price#7, ss_sold_date_sk#8]
 Batched: true
 Location: InMemoryFileIndex []
@@ -87,7 +87,7 @@ Join condition: None
 Output [4]: [ss_item_sk#4, ss_customer_sk#5, ss_store_sk#6, ss_ext_sales_price#7]
 Input [6]: [d_date_sk#1, ss_item_sk#4, ss_customer_sk#5, ss_store_sk#6, ss_ext_sales_price#7, ss_sold_date_sk#8]

-(unknown) Scan parquet spark_catalog.default.item
+(11) Scan parquet spark_catalog.default.item
 Output [6]: [i_item_sk#9, i_brand_id#10, i_brand#11, i_manufact_id#12, i_manufact#13, i_manager_id#14]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -119,7 +119,7 @@ Join condition: None
 Output [7]: [ss_customer_sk#5, ss_store_sk#6, ss_ext_sales_price#7, i_brand_id#10, i_brand#11, i_manufact_id#12, i_manufact#13]
 Input [9]: [ss_item_sk#4, ss_customer_sk#5, ss_store_sk#6, ss_ext_sales_price#7, i_item_sk#9, i_brand_id#10, i_brand#11, i_manufact_id#12, i_manufact#13]

-(unknown) Scan parquet spark_catalog.default.customer
+(18) Scan parquet spark_catalog.default.customer
 Output [2]: [c_customer_sk#15, c_current_addr_sk#16]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -147,7 +147,7 @@ Join condition: None
 Output [7]: [ss_store_sk#6, ss_ext_sales_price#7, i_brand_id#10, i_brand#11, i_manufact_id#12, i_manufact#13, c_current_addr_sk#16]
 Input [9]: [ss_customer_sk#5, ss_store_sk#6, ss_ext_sales_price#7, i_brand_id#10, i_brand#11, i_manufact_id#12, i_manufact#13, c_customer_sk#15, c_current_addr_sk#16]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(24) Scan parquet spark_catalog.default.customer_address
 Output [2]: [ca_address_sk#17, ca_zip#18]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -175,7 +175,7 @@ Join condition: None
 Output [7]: [ss_store_sk#6, ss_ext_sales_price#7, i_brand_id#10, i_brand#11, i_manufact_id#12, i_manufact#13, ca_zip#18]
 Input [9]: [ss_store_sk#6, ss_ext_sales_price#7, i_brand_id#10, i_brand#11, i_manufact_id#12, i_manufact#13, c_current_addr_sk#16, ca_address_sk#17, ca_zip#18]

-(unknown) Scan parquet spark_catalog.default.store
+(30) Scan parquet spark_catalog.default.store
 Output [2]: [s_store_sk#19, s_zip#20]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q2/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q2/explain.txt
index 2fea53fa7..18de3145c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q2/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q2/explain.txt
@@ -37,7 +37,7 @@
    +- CometScan parquet spark_catalog.default.date_dim (25)


-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
 Output [2]: [ws_ext_sales_price#1, ws_sold_date_sk#2]
 Batched: true
 Location: InMemoryFileIndex []
@@ -48,7 +48,7 @@ ReadSchema: struct
 Input [2]: [ws_ext_sales_price#1, ws_sold_date_sk#2]
 Arguments: [sold_date_sk#3, sales_price#4], [ws_sold_date_sk#2 AS sold_date_sk#3, ws_ext_sales_price#1 AS sales_price#4]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(3) Scan parquet spark_catalog.default.catalog_sales
 Output [2]: [cs_ext_sales_price#5, cs_sold_date_sk#6]
 Batched: true
 Location: InMemoryFileIndex []
@@ -66,7 +66,7 @@ Child 1 Input [2]: [sold_date_sk#7, sales_price#8]
 (6) ColumnarToRow [codegen id : 2]
 Input [2]: [sold_date_sk#3, sales_price#4]

-(unknown) Scan parquet spark_catalog.default.date_dim
+(7) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#9, d_week_seq#10, d_day_name#11]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -112,7 +112,7 @@ Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday   ) THEN sal
 Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday   ) THEN sales_price#4 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday   ) THEN sales_price#4 END))#27, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday  ) THEN sales_price#4 END))#28, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END))#29, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END))#30, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday   ) THEN sales_price#4 END))#31, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))#32]
 Results [8]: [d_week_seq#10, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday   ) THEN sales_price#4 END))#26,17,2) AS sun_sales#33, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday   ) THEN sales_price#4 END))#27,17,2) AS mon_sales#34, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday  ) THEN sales_price#4 END))#28,17,2) AS tue_sales#35, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END))#29,17,2) AS wed_sales#36, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END))#30,17,2) AS thu_sales#37, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday   ) THEN sales_price#4 END))#31,17,2) AS fri_sales#38, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))#32,17,2) AS sat_sales#39]

-(unknown) Scan parquet spark_catalog.default.date_dim
+(16) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_week_seq#40, d_year#41]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -154,7 +154,7 @@ Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday   ) THEN sal
 Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday   ) THEN sales_price#4 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday   ) THEN sales_price#4 END))#27, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday  ) THEN sales_price#4 END))#28, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END))#29, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END))#30, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday   ) THEN sales_price#4 END))#31, sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))#32]
 Results [8]: [d_week_seq#10, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Sunday   ) THEN sales_price#4 END))#26,17,2) AS sun_sales#33, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Monday   ) THEN sales_price#4 END))#27,17,2) AS mon_sales#34, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Tuesday  ) THEN sales_price#4 END))#28,17,2) AS tue_sales#35, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Wednesday) THEN sales_price#4 END))#29,17,2) AS wed_sales#36, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Thursday ) THEN sales_price#4 END))#30,17,2) AS thu_sales#37, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Friday   ) THEN sales_price#4 END))#31,17,2) AS fri_sales#38, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#11 = Saturday ) THEN sales_price#4 END))#32,17,2) AS sat_sales#39]

-(unknown) Scan parquet spark_catalog.default.date_dim
+(25) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_week_seq#57, d_year#58]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt
index eedf666dd..333ef218c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q20/explain.txt
@@ -21,7 +21,7 @@ TakeOrderedAndProject (20)
    +- ReusedExchange (10)


-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(1) Scan parquet spark_catalog.default.catalog_sales
 Output [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -36,7 +36,7 @@ Condition : isnotnull(cs_item_sk#1)
 (3) ColumnarToRow [codegen id : 3]
 Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
 Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -125,7 +125,7 @@ BroadcastExchange (25)
    +- CometScan parquet spark_catalog.default.date_dim (21)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(21) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#11, d_date#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt
index b5625d8e0..77f17c7f3 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q21/explain.txt
@@ -25,7 +25,7 @@ TakeOrderedAndProject (24)
    +- ReusedExchange (17)


-(unknown) Scan parquet spark_catalog.default.inventory
+(1) Scan parquet spark_catalog.default.inventory
 Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -40,7 +40,7 @@ Condition : (isnotnull(inv_warehouse_sk#2) AND isnotnull(inv_item_sk#1))
 (3) ColumnarToRow [codegen id : 4]
 Input [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4]

-(unknown) Scan parquet spark_catalog.default.warehouse
+(4) Scan parquet spark_catalog.default.warehouse
 Output [2]: [w_warehouse_sk#6, w_warehouse_name#7]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/warehouse]
@@ -68,7 +68,7 @@ Join condition: None
 Output [4]: [inv_item_sk#1, inv_quantity_on_hand#3, inv_date_sk#4, w_warehouse_name#7]
 Input [6]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4, w_warehouse_sk#6, w_warehouse_name#7]

-(unknown) Scan parquet spark_catalog.default.item
+(10) Scan parquet spark_catalog.default.item
 Output [3]: [i_item_sk#8, i_item_id#9, i_current_price#10]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -148,7 +148,7 @@ BroadcastExchange (28)
    +- CometScan parquet spark_catalog.default.date_dim (25)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(25) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#11, d_date#12]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q22/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q22/explain.txt
index 7dfa2dc06..9f5771fed 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q22/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q22/explain.txt
@@ -24,7 +24,7 @@ TakeOrderedAndProject (23)
    +- CometScan parquet spark_catalog.default.warehouse (13)


-(unknown) Scan parquet spark_catalog.default.inventory
+(1) Scan parquet spark_catalog.default.inventory
 Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -52,7 +52,7 @@ Join condition: None
 Output [3]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3]
 Input [5]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4, d_date_sk#6]

-(unknown) Scan parquet spark_catalog.default.item
+(7) Scan parquet spark_catalog.default.item
 Output [5]: [i_item_sk#7, i_brand#8, i_class#9, i_category#10, i_product_name#11]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -80,7 +80,7 @@ Join condition: None
 Output [6]: [inv_warehouse_sk#2, inv_quantity_on_hand#3, i_brand#8, i_class#9, i_category#10, i_product_name#11]
 Input [8]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, i_item_sk#7, i_brand#8, i_class#9, i_category#10, i_product_name#11]

-(unknown) Scan parquet spark_catalog.default.warehouse
+(13) Scan parquet spark_catalog.default.warehouse
 Output [1]: [w_warehouse_sk#12]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/warehouse]
@@ -144,7 +144,7 @@ BroadcastExchange (28)
    +- CometScan parquet spark_catalog.default.date_dim (24)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(24) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#6, d_month_seq#24]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23a/explain.txt
index 5ec1794c1..78d2b8519 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q23a/explain.txt
@@ -67,7 +67,7 @@
    +- ReusedExchange (60)


-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(1) Scan parquet spark_catalog.default.catalog_sales
 Output [5]: [cs_bill_customer_sk#1, cs_item_sk#2, cs_quantity#3, cs_list_price#4, cs_sold_date_sk#5]
 Batched: true
 Location: InMemoryFileIndex []
@@ -77,7 +77,7 @@ ReadSchema: struct (avg(ctr_total_ret
 Output [2]: [ctr_customer_sk#12, ctr_total_return#14]
 Input [5]: [ctr_customer_sk#12, ctr_state#13, ctr_total_return#14, (avg(ctr_total_return) * 1.2)#23, ctr_state#13#24]

-(unknown) Scan parquet spark_catalog.default.customer
+(36) Scan parquet spark_catalog.default.customer
 Output [14]: [c_customer_sk#25, c_customer_id#26, c_current_addr_sk#27, c_salutation#28, c_first_name#29, c_last_name#30, c_preferred_cust_flag#31, c_birth_day#32, c_birth_month#33, c_birth_year#34, c_birth_country#35, c_login#36, c_email_address#37, c_last_review_date#38]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -251,7 +251,7 @@ Join condition: None
 Output [14]: [ctr_total_return#14, c_customer_id#26, c_current_addr_sk#27, c_salutation#28, c_first_name#29, c_last_name#30, c_preferred_cust_flag#31, c_birth_day#32, c_birth_month#33, c_birth_year#34, c_birth_country#35, c_login#36, c_email_address#37, c_last_review_date#38]
 Input [16]: [ctr_customer_sk#12, ctr_total_return#14, c_customer_sk#25, c_customer_id#26, c_current_addr_sk#27, c_salutation#28, c_first_name#29, c_last_name#30, c_preferred_cust_flag#31, c_birth_day#32, c_birth_month#33, c_birth_year#34, c_birth_country#35, c_login#36, c_email_address#37, c_last_review_date#38]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(42) Scan parquet spark_catalog.default.customer_address
 Output [2]: [ca_address_sk#39, ca_state#40]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -297,7 +297,7 @@ BroadcastExchange (54)
    +- CometScan parquet spark_catalog.default.date_dim (50)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(50) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#6, d_year#41]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q31/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q31/explain.txt
index 7b1f7bb5d..e20d45486 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q31/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q31/explain.txt
-91,7 +91,7 @@ +- ReusedExchange (80) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -119,7 +119,7 @@ Join condition: None Output [4]: [ss_addr_sk#1, ss_ext_sales_price#2, d_year#6, d_qoy#7] Input [6]: [ss_addr_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3, d_date_sk#5, d_year#6, d_qoy#7] -(unknown) Scan parquet spark_catalog.default.customer_address +(7) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#8, ca_county#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -165,7 +165,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#2))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#2))#12] Results [3]: [ca_county#9, d_year#6, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#2))#12,17,2) AS store_sales#13] -(unknown) Scan parquet spark_catalog.default.store_sales +(16) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#14, ss_ext_sales_price#15, ss_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -234,7 +234,7 @@ Right keys [1]: [ca_county#22] Join type: Inner Join condition: None -(unknown) Scan parquet spark_catalog.default.store_sales +(30) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_addr_sk#26, ss_ext_sales_price#27, ss_sold_date_sk#28] Batched: true Location: InMemoryFileIndex [] @@ -307,7 +307,7 @@ Join condition: None Output [5]: [ca_county#9, d_year#6, store_sales#13, store_sales#25, store_sales#37] Input [7]: [ca_county#9, d_year#6, store_sales#13, ca_county#22, store_sales#25, ca_county#34, store_sales#37] -(unknown) Scan parquet spark_catalog.default.web_sales +(45) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#38, ws_ext_sales_price#39, ws_sold_date_sk#40] Batched: true Location: InMemoryFileIndex [] @@ -376,7 +376,7 @@ Right keys [1]: [ca_county#46] Join type: Inner Join condition: None -(unknown) Scan parquet spark_catalog.default.web_sales +(59) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#51, ws_ext_sales_price#52, ws_sold_date_sk#53] Batched: true Location: InMemoryFileIndex [] @@ -449,7 +449,7 @@ Join condition: (CASE WHEN (web_sales#50 > 0.00) THEN (web_sales#62 / web_sales# Output [8]: [ca_county#9, d_year#6, store_sales#13, store_sales#25, store_sales#37, ca_county#46, web_sales#50, web_sales#62] Input [9]: [ca_county#9, d_year#6, store_sales#13, store_sales#25, store_sales#37, ca_county#46, web_sales#50, ca_county#59, web_sales#62] -(unknown) Scan parquet spark_catalog.default.web_sales +(74) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_addr_sk#63, ws_ext_sales_price#64, ws_sold_date_sk#65] Batched: true Location: InMemoryFileIndex [] @@ -539,7 +539,7 @@ BroadcastExchange (94) +- CometScan parquet spark_catalog.default.date_dim (91) -(unknown) Scan parquet spark_catalog.default.date_dim +(91) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#6, d_qoy#7] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -564,7 +564,7 @@ BroadcastExchange (98) +- CometScan parquet spark_catalog.default.date_dim (95) -(unknown) Scan parquet spark_catalog.default.date_dim +(95) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#18, d_year#19, d_qoy#20] Batched: true Location [not included in 
comparison]/{warehouse_dir}/date_dim] @@ -589,7 +589,7 @@ BroadcastExchange (102) +- CometScan parquet spark_catalog.default.date_dim (99) -(unknown) Scan parquet spark_catalog.default.date_dim +(99) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#30, d_year#31, d_qoy#32] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt index e3c659e27..bd7caaaa1 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q32/explain.txt @@ -30,7 +30,7 @@ +- ReusedExchange (24) -(unknown) Scan parquet spark_catalog.default.catalog_sales +(1) Scan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -45,7 +45,7 @@ Condition : (isnotnull(cs_item_sk#1) AND isnotnull(cs_ext_discount_amt#2)) (3) ColumnarToRow [codegen id : 6] Input [3]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_manufact_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -77,7 +77,7 @@ Join condition: None Output [3]: [cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5] Input [4]: [cs_item_sk#1, cs_ext_discount_amt#2, cs_sold_date_sk#3, i_item_sk#5] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(11) Scan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#7, cs_ext_discount_amt#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -182,7 +182,7 @@ BroadcastExchange (34) +- CometScan parquet spark_catalog.default.date_dim (30) -(unknown) Scan parquet spark_catalog.default.date_dim +(30) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#18, d_date#23] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt index d7dda8078..f9541481c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q33/explain.txt @@ -64,7 +64,7 @@ TakeOrderedAndProject (63) +- ReusedExchange (53) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -92,7 +92,7 @@ Join condition: None Output [3]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3] Input [5]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4, d_date_sk#6] -(unknown) Scan parquet spark_catalog.default.customer_address +(7) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#7, ca_gmt_offset#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -124,7 +124,7 @@ Join condition: None Output [2]: [ss_item_sk#1, ss_ext_sales_price#3] Input [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ca_address_sk#7] -(unknown) Scan parquet spark_catalog.default.item +(14) Scan 
parquet spark_catalog.default.item Output [2]: [i_item_sk#9, i_manufact_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -138,7 +138,7 @@ Condition : isnotnull(i_item_sk#9) (16) ColumnarToRow [codegen id : 4] Input [2]: [i_item_sk#9, i_manufact_id#10] -(unknown) Scan parquet spark_catalog.default.item +(17) Scan parquet spark_catalog.default.item Output [2]: [i_category#11, i_manufact_id#12] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -198,7 +198,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#3))#15] Results [2]: [i_manufact_id#10, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#3))#15,17,2) AS total_sales#16] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(29) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#17, cs_item_sk#18, cs_ext_sales_price#19, cs_sold_date_sk#20] Batched: true Location: InMemoryFileIndex [] @@ -270,7 +270,7 @@ Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#19))] Aggregate Attributes [1]: [sum(UnscaledValue(cs_ext_sales_price#19))#28] Results [2]: [i_manufact_id#25, MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#19))#28,17,2) AS total_sales#29] -(unknown) Scan parquet spark_catalog.default.web_sales +(44) Scan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#30, ws_bill_addr_sk#31, ws_ext_sales_price#32, ws_sold_date_sk#33] Batched: true Location: InMemoryFileIndex [] @@ -376,7 +376,7 @@ BroadcastExchange (68) +- CometScan parquet spark_catalog.default.date_dim (64) -(unknown) Scan parquet spark_catalog.default.date_dim +(64) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#49, d_moy#50] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt index 64f2b3c31..d0f166fe9 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q34/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -61,7 +61,7 @@ Join condition: None Output [4]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4] Input [6]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5, d_date_sk#7] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#8, s_county#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -93,7 +93,7 @@ Join condition: None Output [3]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_ticket_number#4] Input [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, s_store_sk#8] -(unknown) Scan parquet spark_catalog.default.household_demographics +(14) Scan parquet spark_catalog.default.household_demographics Output [4]: [hd_demo_sk#10, hd_buy_potential#11, hd_dep_count#12, hd_vehicle_count#13] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -147,7 +147,7 @@ 
Results [3]: [ss_ticket_number#4, ss_customer_sk#1, count(1)#16 AS cnt#17] Input [3]: [ss_ticket_number#4, ss_customer_sk#1, cnt#17] Condition : ((cnt#17 >= 15) AND (cnt#17 <= 20)) -(unknown) Scan parquet spark_catalog.default.customer +(25) Scan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -193,7 +193,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(unknown) Scan parquet spark_catalog.default.date_dim +(33) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#23, d_dom#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt index de7514efc..a46018cdd 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q35/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- CometScan parquet spark_catalog.default.customer_demographics (33) -(unknown) Scan parquet spark_catalog.default.customer +(1) Scan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -57,7 +57,7 @@ Condition : (isnotnull(c_current_addr_sk#5) AND isnotnull(c_current_cdemo_sk#4)) (3) ColumnarToRow [codegen id : 9] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -90,7 +90,7 @@ Right keys [1]: [ss_customer_sk#6] Join type: LeftSemi Join condition: None -(unknown) Scan parquet spark_catalog.default.web_sales +(11) Scan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -123,7 +123,7 @@ Right keys [1]: [ws_bill_customer_sk#10] Join type: ExistenceJoin(exists#2) Join condition: None -(unknown) Scan parquet spark_catalog.default.catalog_sales +(18) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#14, cs_sold_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -164,7 +164,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(unknown) Scan parquet spark_catalog.default.customer_address +(27) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#18, ca_state#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -192,7 +192,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#19] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#18, ca_state#19] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(33) Scan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_dep_count#23, cd_dep_employed_count#24, cd_dep_college_count#25] Batched: true Location [not included in 
comparison]/{warehouse_dir}/customer_demographics] @@ -252,7 +252,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(unknown) Scan parquet spark_catalog.default.date_dim +(43) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#74, d_qoy#75] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt index bb2a1b1a5..39b838157 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q36/explain.txt @@ -29,7 +29,7 @@ TakeOrderedAndProject (28) +- CometScan parquet spark_catalog.default.store (13) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -57,7 +57,7 @@ Join condition: None Output [4]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4] Input [6]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5, d_date_sk#7] -(unknown) Scan parquet spark_catalog.default.item +(7) Scan parquet spark_catalog.default.item Output [3]: [i_item_sk#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -85,7 +85,7 @@ Join condition: None Output [5]: [ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, i_class#9, i_category#10] Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, i_item_sk#8, i_class#9, i_category#10] -(unknown) Scan parquet spark_catalog.default.store +(13) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#11, s_state#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -169,7 +169,7 @@ BroadcastExchange (33) +- CometScan parquet spark_catalog.default.date_dim (29) -(unknown) Scan parquet spark_catalog.default.date_dim +(29) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_year#28] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt index d13ff264c..23c4ae742 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q37/explain.txt @@ -26,7 +26,7 @@ TakeOrderedAndProject (25) +- CometScan parquet spark_catalog.default.catalog_sales (16) -(unknown) Scan parquet spark_catalog.default.item +(1) Scan parquet spark_catalog.default.item Output [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -44,7 +44,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item (4) ColumnarToRow [codegen id : 3] Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(unknown) Scan parquet spark_catalog.default.inventory +(5) Scan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -94,7 
+94,7 @@ Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)),false), [plan_id=2] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(16) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_item_sk#11, cs_sold_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -154,7 +154,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(unknown) Scan parquet spark_catalog.default.date_dim +(26) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt index 87d960592..5503439ee 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q38/explain.txt @@ -48,7 +48,7 @@ +- ReusedExchange (36) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2] Batched: true Location: InMemoryFileIndex [] @@ -76,7 +76,7 @@ Join condition: None Output [2]: [ss_customer_sk#1, d_date#5] Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5] -(unknown) Scan parquet spark_catalog.default.customer +(7) Scan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#6, c_first_name#7, c_last_name#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -122,7 +122,7 @@ Functions: [] Aggregate Attributes: [] Results [3]: [c_last_name#8, c_first_name#7, d_date#5] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(16) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_bill_customer_sk#9, cs_sold_date_sk#10] Batched: true Location: InMemoryFileIndex [] @@ -191,7 +191,7 @@ Right keys [6]: [coalesce(c_last_name#16, ), isnull(c_last_name#16), coalesce(c_ Join type: LeftSemi Join condition: None -(unknown) Scan parquet spark_catalog.default.web_sales +(30) Scan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#17, ws_sold_date_sk#18] Batched: true Location: InMemoryFileIndex [] @@ -292,7 +292,7 @@ BroadcastExchange (52) +- CometScan parquet spark_catalog.default.date_dim (48) -(unknown) Scan parquet spark_catalog.default.date_dim +(48) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_date#5, d_month_seq#29] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt index e10ff3340..8a92d8bbc 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39a/explain.txt @@ -45,7 +45,7 @@ +- ReusedExchange (33) -(unknown) Scan parquet spark_catalog.default.inventory +(1) Scan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ 
-60,7 +60,7 @@ Condition : (isnotnull(inv_item_sk#1) AND isnotnull(inv_warehouse_sk#2)) (3) ColumnarToRow [codegen id : 4] Input [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [1]: [i_item_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -88,7 +88,7 @@ Join condition: None Output [4]: [inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4, i_item_sk#6] Input [5]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4, i_item_sk#6] -(unknown) Scan parquet spark_catalog.default.warehouse +(10) Scan parquet spark_catalog.default.warehouse Output [2]: [w_warehouse_sk#7, w_warehouse_name#8] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -155,7 +155,7 @@ Condition : CASE WHEN (mean#24 = 0.0) THEN false ELSE ((stdev#23 / mean#24) > 1. Output [5]: [w_warehouse_sk#7, i_item_sk#6, d_moy#10, mean#24, CASE WHEN (mean#24 = 0.0) THEN null ELSE (stdev#23 / mean#24) END AS cov#25] Input [5]: [w_warehouse_sk#7, i_item_sk#6, d_moy#10, stdev#23, mean#24] -(unknown) Scan parquet spark_catalog.default.inventory +(24) Scan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#26, inv_warehouse_sk#27, inv_quantity_on_hand#28, inv_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(unknown) Scan parquet spark_catalog.default.date_dim +(45) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#48, d_moy#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -293,7 +293,7 @@ BroadcastExchange (54) +- CometScan parquet spark_catalog.default.date_dim (50) -(unknown) Scan parquet spark_catalog.default.date_dim +(50) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#34, d_year#49, d_moy#35] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39b/explain.txt index 98e8bc464..48ab98a85 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39b/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q39b/explain.txt @@ -45,7 +45,7 @@ +- ReusedExchange (33) -(unknown) Scan parquet spark_catalog.default.inventory +(1) Scan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Condition : (isnotnull(inv_item_sk#1) AND isnotnull(inv_warehouse_sk#2)) (3) ColumnarToRow [codegen id : 4] Input [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [1]: [i_item_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -88,7 +88,7 @@ Join condition: None Output [4]: [inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4, i_item_sk#6] Input [5]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4, i_item_sk#6] -(unknown) Scan parquet spark_catalog.default.warehouse +(10) Scan parquet spark_catalog.default.warehouse Output [2]: [w_warehouse_sk#7, w_warehouse_name#8] Batched: 
true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -155,7 +155,7 @@ Condition : (CASE WHEN (mean#24 = 0.0) THEN false ELSE ((stdev#23 / mean#24) > 1 Output [5]: [w_warehouse_sk#7, i_item_sk#6, d_moy#10, mean#24, CASE WHEN (mean#24 = 0.0) THEN null ELSE (stdev#23 / mean#24) END AS cov#25] Input [5]: [w_warehouse_sk#7, i_item_sk#6, d_moy#10, stdev#23, mean#24] -(unknown) Scan parquet spark_catalog.default.inventory +(24) Scan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#26, inv_warehouse_sk#27, inv_quantity_on_hand#28, inv_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -263,7 +263,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (45) -(unknown) Scan parquet spark_catalog.default.date_dim +(45) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#48, d_moy#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -293,7 +293,7 @@ BroadcastExchange (54) +- CometScan parquet spark_catalog.default.date_dim (50) -(unknown) Scan parquet spark_catalog.default.date_dim +(50) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#34, d_year#49, d_moy#35] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q4/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q4/explain.txt index 9b2bb4991..a854a1041 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q4/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q4/explain.txt @@ -109,7 +109,7 @@ TakeOrderedAndProject (108) +- ReusedExchange (99) -(unknown) Scan parquet spark_catalog.default.customer +(1) Scan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -123,7 +123,7 @@ Condition : (isnotnull(c_customer_sk#1) AND isnotnull(c_customer_id#2)) (3) ColumnarToRow [codegen id : 3] Input [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [6]: [ss_customer_sk#9, ss_ext_discount_amt#10, ss_ext_sales_price#11, ss_ext_wholesale_cost#12, ss_ext_list_price#13, ss_sold_date_sk#14] Batched: true Location: InMemoryFileIndex [] @@ -187,7 +187,7 @@ Results [2]: [c_customer_id#2 AS customer_id#23, sum(((((ss_ext_list_price#13 - Input [2]: [customer_id#23, year_total#24] Condition : (isnotnull(year_total#24) AND (year_total#24 > 0.000000)) -(unknown) Scan parquet spark_catalog.default.customer +(17) Scan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#25, c_customer_id#26, c_first_name#27, c_last_name#28, c_preferred_cust_flag#29, c_birth_country#30, c_login#31, c_email_address#32] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -201,7 +201,7 @@ Condition : (isnotnull(c_customer_sk#25) AND isnotnull(c_customer_id#26)) (19) ColumnarToRow [codegen id : 6] Input [8]: [c_customer_sk#25, c_customer_id#26, c_first_name#27, c_last_name#28, c_preferred_cust_flag#29, c_birth_country#30, c_login#31, c_email_address#32] -(unknown) Scan parquet 
spark_catalog.default.store_sales +(20) Scan parquet spark_catalog.default.store_sales Output [6]: [ss_customer_sk#33, ss_ext_discount_amt#34, ss_ext_sales_price#35, ss_ext_wholesale_cost#36, ss_ext_list_price#37, ss_sold_date_sk#38] Batched: true Location: InMemoryFileIndex [] @@ -271,7 +271,7 @@ Right keys [1]: [customer_id#46] Join type: Inner Join condition: None -(unknown) Scan parquet spark_catalog.default.customer +(34) Scan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#54, c_customer_id#55, c_first_name#56, c_last_name#57, c_preferred_cust_flag#58, c_birth_country#59, c_login#60, c_email_address#61] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -285,7 +285,7 @@ Condition : (isnotnull(c_customer_sk#54) AND isnotnull(c_customer_id#55)) (36) ColumnarToRow [codegen id : 10] Input [8]: [c_customer_sk#54, c_customer_id#55, c_first_name#56, c_last_name#57, c_preferred_cust_flag#58, c_birth_country#59, c_login#60, c_email_address#61] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(37) Scan parquet spark_catalog.default.catalog_sales Output [6]: [cs_bill_customer_sk#62, cs_ext_discount_amt#63, cs_ext_sales_price#64, cs_ext_wholesale_cost#65, cs_ext_list_price#66, cs_sold_date_sk#67] Batched: true Location: InMemoryFileIndex [] @@ -363,7 +363,7 @@ Join condition: None Output [11]: [customer_id#23, year_total#24, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#53, year_total#77] Input [12]: [customer_id#23, year_total#24, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#53, customer_id#76, year_total#77] -(unknown) Scan parquet spark_catalog.default.customer +(53) Scan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#78, c_customer_id#79, c_first_name#80, c_last_name#81, c_preferred_cust_flag#82, c_birth_country#83, c_login#84, c_email_address#85] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -377,7 +377,7 @@ Condition : (isnotnull(c_customer_sk#78) AND isnotnull(c_customer_id#79)) (55) ColumnarToRow [codegen id : 14] Input [8]: [c_customer_sk#78, c_customer_id#79, c_first_name#80, c_last_name#81, c_preferred_cust_flag#82, c_birth_country#83, c_login#84, c_email_address#85] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(56) Scan parquet spark_catalog.default.catalog_sales Output [6]: [cs_bill_customer_sk#86, cs_ext_discount_amt#87, cs_ext_sales_price#88, cs_ext_wholesale_cost#89, cs_ext_list_price#90, cs_sold_date_sk#91] Batched: true Location: InMemoryFileIndex [] @@ -451,7 +451,7 @@ Join condition: (CASE WHEN (year_total#77 > 0.000000) THEN (year_total#100 / yea Output [10]: [customer_id#23, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#77, year_total#100] Input [13]: [customer_id#23, year_total#24, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#53, year_total#77, customer_id#99, year_total#100] -(unknown) Scan parquet spark_catalog.default.customer +(71) Scan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#101, 
c_customer_id#102, c_first_name#103, c_last_name#104, c_preferred_cust_flag#105, c_birth_country#106, c_login#107, c_email_address#108] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -465,7 +465,7 @@ Condition : (isnotnull(c_customer_sk#101) AND isnotnull(c_customer_id#102)) (73) ColumnarToRow [codegen id : 18] Input [8]: [c_customer_sk#101, c_customer_id#102, c_first_name#103, c_last_name#104, c_preferred_cust_flag#105, c_birth_country#106, c_login#107, c_email_address#108] -(unknown) Scan parquet spark_catalog.default.web_sales +(74) Scan parquet spark_catalog.default.web_sales Output [6]: [ws_bill_customer_sk#109, ws_ext_discount_amt#110, ws_ext_sales_price#111, ws_ext_wholesale_cost#112, ws_ext_list_price#113, ws_sold_date_sk#114] Batched: true Location: InMemoryFileIndex [] @@ -543,7 +543,7 @@ Join condition: None Output [11]: [customer_id#23, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#77, year_total#100, year_total#124] Input [12]: [customer_id#23, customer_id#46, customer_first_name#47, customer_last_name#48, customer_preferred_cust_flag#49, customer_birth_country#50, customer_login#51, customer_email_address#52, year_total#77, year_total#100, customer_id#123, year_total#124] -(unknown) Scan parquet spark_catalog.default.customer +(90) Scan parquet spark_catalog.default.customer Output [8]: [c_customer_sk#125, c_customer_id#126, c_first_name#127, c_last_name#128, c_preferred_cust_flag#129, c_birth_country#130, c_login#131, c_email_address#132] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -557,7 +557,7 @@ Condition : (isnotnull(c_customer_sk#125) AND isnotnull(c_customer_id#126)) (92) ColumnarToRow [codegen id : 22] Input [8]: [c_customer_sk#125, c_customer_id#126, c_first_name#127, c_last_name#128, c_preferred_cust_flag#129, c_birth_country#130, c_login#131, c_email_address#132] -(unknown) Scan parquet spark_catalog.default.web_sales +(93) Scan parquet spark_catalog.default.web_sales Output [6]: [ws_bill_customer_sk#133, ws_ext_discount_amt#134, ws_ext_sales_price#135, ws_ext_wholesale_cost#136, ws_ext_list_price#137, ws_sold_date_sk#138] Batched: true Location: InMemoryFileIndex [] @@ -644,7 +644,7 @@ BroadcastExchange (112) +- CometScan parquet spark_catalog.default.date_dim (109) -(unknown) Scan parquet spark_catalog.default.date_dim +(109) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#16, d_year#17] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -669,7 +669,7 @@ BroadcastExchange (116) +- CometScan parquet spark_catalog.default.date_dim (113) -(unknown) Scan parquet spark_catalog.default.date_dim +(113) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#40, d_year#41] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt index fda4bbae0..f63b94658 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q40/explain.txt @@ -34,7 +34,7 @@ TakeOrderedAndProject (33) +- ReusedExchange (27) -(unknown) Scan parquet spark_catalog.default.catalog_sales +(1) Scan parquet spark_catalog.default.catalog_sales 
Output [5]: [cs_warehouse_sk#1, cs_item_sk#2, cs_order_number#3, cs_sales_price#4, cs_sold_date_sk#5]
Batched: true
Location: InMemoryFileIndex []
@@ -57,7 +57,7 @@ Arguments: hashpartitioning(cs_order_number#3, cs_item_sk#2, 5), ENSURE_REQUIREM
Input [5]: [cs_warehouse_sk#1, cs_item_sk#2, cs_order_number#3, cs_sales_price#4, cs_sold_date_sk#5]
Arguments: [cs_order_number#3 ASC NULLS FIRST, cs_item_sk#2 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(6) Scan parquet spark_catalog.default.catalog_returns
Output [4]: [cr_item_sk#7, cr_order_number#8, cr_refunded_cash#9, cr_returned_date_sk#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -93,7 +93,7 @@ Join condition: None
Output [5]: [cs_warehouse_sk#1, cs_item_sk#2, cs_sales_price#4, cs_sold_date_sk#5, cr_refunded_cash#9]
Input [8]: [cs_warehouse_sk#1, cs_item_sk#2, cs_order_number#3, cs_sales_price#4, cs_sold_date_sk#5, cr_item_sk#7, cr_order_number#8, cr_refunded_cash#9]

-(unknown) Scan parquet spark_catalog.default.warehouse
+(14) Scan parquet spark_catalog.default.warehouse
Output [2]: [w_warehouse_sk#11, w_state#12]
Batched: true
Location [not included in comparison]/{warehouse_dir}/warehouse]
@@ -121,7 +121,7 @@ Join condition: None
Output [5]: [cs_item_sk#2, cs_sales_price#4, cs_sold_date_sk#5, cr_refunded_cash#9, w_state#12]
Input [7]: [cs_warehouse_sk#1, cs_item_sk#2, cs_sales_price#4, cs_sold_date_sk#5, cr_refunded_cash#9, w_warehouse_sk#11, w_state#12]

-(unknown) Scan parquet spark_catalog.default.item
+(20) Scan parquet spark_catalog.default.item
Output [3]: [i_item_sk#13, i_item_id#14, i_current_price#15]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -197,7 +197,7 @@ BroadcastExchange (37)
+- CometScan parquet spark_catalog.default.date_dim (34)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(34) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#16, d_date#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q41/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q41/explain.txt
index 7174e2746..07196ba8c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q41/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q41/explain.txt
@@ -21,7 +21,7 @@ TakeOrderedAndProject (20)
+- CometScan parquet spark_catalog.default.item (5)

-(unknown) Scan parquet spark_catalog.default.item
+(1) Scan parquet spark_catalog.default.item
Output [3]: [i_manufact_id#1, i_manufact#2, i_product_name#3]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -39,7 +39,7 @@ Arguments: [i_manufact#2, i_product_name#3], [i_manufact#2, i_product_name#3]
(4) ColumnarToRow [codegen id : 3]
Input [2]: [i_manufact#2, i_product_name#3]

-(unknown) Scan parquet spark_catalog.default.item
+(5) Scan parquet spark_catalog.default.item
Output [5]: [i_category#4, i_manufact#5, i_size#6, i_color#7, i_units#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt
index 958f358b7..d51d63d8a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q42/explain.txt
@@ -22,7 +22,7 @@ TakeOrderedAndProject (21)
+- CometScan parquet spark_catalog.default.item (11)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(1) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#1, d_year#2, d_moy#3]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -40,7 +40,7 @@ Arguments: [d_date_sk#1, d_year#2], [d_date_sk#1, d_year#2]
(4) ColumnarToRow [codegen id : 3]
Input [2]: [d_date_sk#1, d_year#2]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(5) Scan parquet spark_catalog.default.store_sales
Output [3]: [ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6]
Batched: true
Location: InMemoryFileIndex []
@@ -69,7 +69,7 @@ Join condition: None
Output [3]: [d_year#2, ss_item_sk#4, ss_ext_sales_price#5]
Input [5]: [d_date_sk#1, d_year#2, ss_item_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6]

-(unknown) Scan parquet spark_catalog.default.item
+(11) Scan parquet spark_catalog.default.item
Output [4]: [i_item_sk#7, i_category_id#8, i_category#9, i_manager_id#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt
index 68b4b5400..e892aa469 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q43/explain.txt
@@ -22,7 +22,7 @@ TakeOrderedAndProject (21)
+- CometScan parquet spark_catalog.default.store (11)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(1) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#1, d_year#2, d_day_name#3]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -40,7 +40,7 @@ Arguments: [d_date_sk#1, d_day_name#3], [d_date_sk#1, d_day_name#3]
(4) ColumnarToRow [codegen id : 3]
Input [2]: [d_date_sk#1, d_day_name#3]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(5) Scan parquet spark_catalog.default.store_sales
Output [3]: [ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6]
Batched: true
Location: InMemoryFileIndex []
@@ -69,7 +69,7 @@ Join condition: None
Output [3]: [d_day_name#3, ss_store_sk#4, ss_sales_price#5]
Input [5]: [d_date_sk#1, d_day_name#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6]

-(unknown) Scan parquet spark_catalog.default.store
+(11) Scan parquet spark_catalog.default.store
Output [4]: [s_store_sk#7, s_store_id#8, s_store_name#9, s_gmt_offset#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt
index 7a7d66737..8408eab0e 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q44/explain.txt
@@ -33,7 +33,7 @@ TakeOrderedAndProject (32)
+- ReusedExchange (29)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_item_sk#1, ss_store_sk#2, ss_net_profit#3, ss_sold_date_sk#4]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -128,7 +128,7 @@ Join condition: None
Output [3]: [item_sk#8, rnk#12, item_sk#13]
Input [4]: [item_sk#8, rnk#12, item_sk#13, rnk#15]

-(unknown) Scan parquet spark_catalog.default.item
+(23) Scan parquet spark_catalog.default.item
Output [2]: [i_item_sk#16, i_product_name#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -185,7 +185,7 @@ Subquery:1 Hosting operator id = 8 Hosting Expression = Subquery scalar-subquery
+- CometScan parquet spark_catalog.default.store_sales (33)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(33) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_addr_sk#22, ss_store_sk#23, ss_net_profit#24, ss_sold_date_sk#25]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt
index d5d860a39..d0d74569b 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q45/explain.txt
@@ -37,7 +37,7 @@ TakeOrderedAndProject (36)
+- CometScan parquet spark_catalog.default.item (25)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [4]: [ws_item_sk#2, ws_bill_customer_sk#3, ws_sales_price#4, ws_sold_date_sk#5]
Batched: true
Location: InMemoryFileIndex []
@@ -52,7 +52,7 @@ Condition : (isnotnull(ws_bill_customer_sk#3) AND isnotnull(ws_item_sk#2))
(3) ColumnarToRow [codegen id : 6]
Input [4]: [ws_item_sk#2, ws_bill_customer_sk#3, ws_sales_price#4, ws_sold_date_sk#5]

-(unknown) Scan parquet spark_catalog.default.customer
+(4) Scan parquet spark_catalog.default.customer
Output [2]: [c_customer_sk#7, c_current_addr_sk#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -80,7 +80,7 @@ Join condition: None
Output [4]: [ws_item_sk#2, ws_sales_price#4, ws_sold_date_sk#5, c_current_addr_sk#8]
Input [6]: [ws_item_sk#2, ws_bill_customer_sk#3, ws_sales_price#4, ws_sold_date_sk#5, c_customer_sk#7, c_current_addr_sk#8]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(10) Scan parquet spark_catalog.default.customer_address
Output [3]: [ca_address_sk#9, ca_city#10, ca_zip#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -121,7 +121,7 @@ Join condition: None
Output [4]: [ws_item_sk#2, ws_sales_price#4, ca_city#10, ca_zip#11]
Input [6]: [ws_item_sk#2, ws_sales_price#4, ws_sold_date_sk#5, ca_city#10, ca_zip#11, d_date_sk#12]

-(unknown) Scan parquet spark_catalog.default.item
+(19) Scan parquet spark_catalog.default.item
Output [2]: [i_item_sk#13, i_item_id#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -149,7 +149,7 @@ Join condition: None
Output [4]: [ws_sales_price#4, ca_city#10, ca_zip#11, i_item_id#14]
Input [6]: [ws_item_sk#2, ws_sales_price#4, ca_city#10, ca_zip#11, i_item_sk#13, i_item_id#14]

-(unknown) Scan parquet spark_catalog.default.item
+(25) Scan parquet spark_catalog.default.item
Output [2]: [i_item_sk#15, i_item_id#16]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -217,7 +217,7 @@ BroadcastExchange (41)
+- CometScan parquet spark_catalog.default.date_dim (37)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(37) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#12, d_year#21, d_qoy#22]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q46/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q46/explain.txt
index 84d65306e..e07e2ab24 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q46/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q46/explain.txt
@@ -40,7 +40,7 @@ TakeOrderedAndProject (39)
+- ReusedExchange (36)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [8]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, ss_sold_date_sk#8]
Batched: true
Location: InMemoryFileIndex []
@@ -68,7 +68,7 @@ Join condition: None
Output [7]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7]
Input [9]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, ss_sold_date_sk#8, d_date_sk#10]

-(unknown) Scan parquet spark_catalog.default.store
+(7) Scan parquet spark_catalog.default.store
Output [2]: [s_store_sk#11, s_city#12]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
@@ -100,7 +100,7 @@ Join condition: None
Output [6]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7]
Input [8]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, s_store_sk#11]

-(unknown) Scan parquet spark_catalog.default.household_demographics
+(14) Scan parquet spark_catalog.default.household_demographics
Output [3]: [hd_demo_sk#13, hd_dep_count#14, hd_vehicle_count#15]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
@@ -132,7 +132,7 @@ Join condition: None
Output [5]: [ss_customer_sk#1, ss_addr_sk#3, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7]
Input [7]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, hd_demo_sk#13]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(21) Scan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#16, ca_city#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -178,7 +178,7 @@ Functions [2]: [sum(UnscaledValue(ss_coupon_amt#6)), sum(UnscaledValue(ss_net_pr
Aggregate Attributes [2]: [sum(UnscaledValue(ss_coupon_amt#6))#22, sum(UnscaledValue(ss_net_profit#7))#23]
Results [5]: [ss_ticket_number#5, ss_customer_sk#1, ca_city#17 AS bought_city#24, MakeDecimal(sum(UnscaledValue(ss_coupon_amt#6))#22,17,2) AS amt#25, MakeDecimal(sum(UnscaledValue(ss_net_profit#7))#23,17,2) AS profit#26]

-(unknown) Scan parquet spark_catalog.default.customer
+(30) Scan parquet spark_catalog.default.customer
Output [4]: [c_customer_sk#27, c_current_addr_sk#28, c_first_name#29, c_last_name#30]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -233,7 +233,7 @@ BroadcastExchange (44)
+- CometScan parquet spark_catalog.default.date_dim (40)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(40) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#10, d_year#33, d_dow#34]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q47/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q47/explain.txt
index 9ea57de06..6185a64a6 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q47/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q47/explain.txt
@@ -46,7 +46,7 @@ TakeOrderedAndProject (45)
+- ReusedExchange (38)

-(unknown) Scan parquet spark_catalog.default.item
+(1) Scan parquet spark_catalog.default.item
Output [3]: [i_item_sk#1, i_brand#2, i_category#3]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -60,7 +60,7 @@ Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(
(3) ColumnarToRow [codegen id : 4]
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(4) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7]
Batched: true
Location: InMemoryFileIndex []
@@ -102,7 +102,7 @@ Join condition: None
Output [6]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11]
Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11]

-(unknown) Scan parquet spark_catalog.default.store
+(13) Scan parquet spark_catalog.default.store
Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
@@ -258,7 +258,7 @@ BroadcastExchange (49)
+- CometScan parquet spark_catalog.default.date_dim (46)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(46) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#9, d_year#10, d_moy#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt
index a13a86a91..718f3fb31 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q48/explain.txt
@@ -29,7 +29,7 @@
+- ReusedExchange (23)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [7]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7]
Batched: true
Location: InMemoryFileIndex []
@@ -44,7 +44,7 @@ Condition : ((((isnotnull(ss_store_sk#3) AND isnotnull(ss_cdemo_sk#1)) AND isnot
(3) ColumnarToRow [codegen id : 5]
Input [7]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7]

-(unknown) Scan parquet spark_catalog.default.store
+(4) Scan parquet spark_catalog.default.store
Output [1]: [s_store_sk#9]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
@@ -72,7 +72,7 @@ Join condition: None
Output [6]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7]
Input [8]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_store_sk#3, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, s_store_sk#9]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(10) Scan parquet spark_catalog.default.customer_demographics
Output [3]: [cd_demo_sk#10, cd_marital_status#11, cd_education_status#12]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -100,7 +100,7 @@ Join condition: ((((((cd_marital_status#11 = M) AND (cd_education_status#12 = 4
Output [4]: [ss_addr_sk#2, ss_quantity#4, ss_net_profit#6, ss_sold_date_sk#7]
Input [9]: [ss_cdemo_sk#1, ss_addr_sk#2, ss_quantity#4, ss_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, cd_demo_sk#10, cd_marital_status#11, cd_education_status#12]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(16) Scan parquet spark_catalog.default.customer_address
Output [3]: [ca_address_sk#13, ca_state#14, ca_country#15]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -173,7 +173,7 @@ BroadcastExchange (33)
+- CometScan parquet spark_catalog.default.date_dim (29)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(29) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#16, d_year#21]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt
index 3e14838b8..bbb550e05 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q49/explain.txt
@@ -75,7 +75,7 @@ TakeOrderedAndProject (74)
+- ReusedExchange (57)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6]
Batched: true
Location: InMemoryFileIndex []
@@ -95,7 +95,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so
Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6]
Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6]

-(unknown) Scan parquet spark_catalog.default.web_returns
+(5) Scan parquet spark_catalog.default.web_returns
Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_returns]
@@ -181,7 +181,7 @@ Condition : ((return_rank#33 <= 10) OR (currency_rank#34 <= 10))
Output [5]: [web AS channel#35, item#30, return_ratio#31, return_rank#33, currency_rank#34]
Input [5]: [item#30, return_ratio#31, currency_ratio#32, return_rank#33, currency_rank#34]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(24) Scan parquet spark_catalog.default.catalog_sales
Output [6]: [cs_item_sk#36, cs_order_number#37, cs_quantity#38, cs_net_paid#39, cs_net_profit#40, cs_sold_date_sk#41]
Batched: true
Location: InMemoryFileIndex []
@@ -201,7 +201,7 @@ Arguments: [cs_item_sk#36, cs_order_number#37, cs_quantity#38, cs_net_paid#39, c
Input [5]: [cs_item_sk#36, cs_order_number#37, cs_quantity#38, cs_net_paid#39, cs_sold_date_sk#41]
Arguments: [cs_item_sk#36, cs_order_number#37, cs_quantity#38, cs_net_paid#39, cs_sold_date_sk#41]

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(28) Scan parquet spark_catalog.default.catalog_returns
Output [5]: [cr_item_sk#43, cr_order_number#44, cr_return_quantity#45, cr_return_amount#46, cr_returned_date_sk#47]
Batched: true
Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -287,7 +287,7 @@ Condition : ((return_rank#68 <= 10) OR (currency_rank#69 <= 10))
Output [5]: [catalog AS channel#70, item#65, return_ratio#66, return_rank#68, currency_rank#69]
Input [5]: [item#65, return_ratio#66, currency_ratio#67, return_rank#68, currency_rank#69]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(47) Scan parquet spark_catalog.default.store_sales
Output [6]: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74, ss_net_profit#75, ss_sold_date_sk#76]
Batched: true
Location: InMemoryFileIndex []
@@ -307,7 +307,7 @@ Arguments: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74,
Input [5]: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74, ss_sold_date_sk#76]
Arguments: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74, ss_sold_date_sk#76]

-(unknown) Scan parquet spark_catalog.default.store_returns
+(51) Scan parquet spark_catalog.default.store_returns
Output [5]: [sr_item_sk#78, sr_ticket_number#79, sr_return_quantity#80, sr_return_amt#81, sr_returned_date_sk#82]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_returns]
@@ -427,7 +427,7 @@ BroadcastExchange (79)
+- CometScan parquet spark_catalog.default.date_dim (75)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(75) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#13, d_year#106, d_moy#107]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt
index 10ba5439e..e3f7538d1 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q5/explain.txt
@@ -71,7 +71,7 @@ TakeOrderedAndProject (70)
+- CometScan parquet spark_catalog.default.web_site (56)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4]
Batched: true
Location: InMemoryFileIndex []
@@ -87,7 +87,7 @@ Condition : isnotnull(ss_store_sk#1)
Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4]
Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11]

-(unknown) Scan parquet spark_catalog.default.store_returns
+(4) Scan parquet spark_catalog.default.store_returns
Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15]
Batched: true
Location: InMemoryFileIndex []
@@ -123,7 +123,7 @@ Join condition: None
Output [5]: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11]
Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22]

-(unknown) Scan parquet spark_catalog.default.store
+(12) Scan parquet spark_catalog.default.store
Output [2]: [s_store_sk#23, s_store_id#24]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
@@ -169,7 +169,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt#
Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#8))#33, sum(UnscaledValue(return_amt#10))#34, sum(UnscaledValue(profit#9))#35, sum(UnscaledValue(net_loss#11))#36]
Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#8))#33,17,2) AS sales#37, MakeDecimal(sum(UnscaledValue(return_amt#10))#34,17,2) AS returns#38, (MakeDecimal(sum(UnscaledValue(profit#9))#35,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#36,17,2)) AS profit#39, store channel AS channel#40, concat(store, s_store_id#24) AS id#41] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(21) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_catalog_page_sk#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45] Batched: true Location: InMemoryFileIndex [] @@ -185,7 +185,7 @@ Condition : isnotnull(cs_catalog_page_sk#42) Input [4]: [cs_catalog_page_sk#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45] Arguments: [page_sk#47, date_sk#48, sales_price#49, profit#50, return_amt#51, net_loss#52], [cs_catalog_page_sk#42 AS page_sk#47, cs_sold_date_sk#45 AS date_sk#48, cs_ext_sales_price#43 AS sales_price#49, cs_net_profit#44 AS profit#50, 0.00 AS return_amt#51, 0.00 AS net_loss#52] -(unknown) Scan parquet spark_catalog.default.catalog_returns +(24) Scan parquet spark_catalog.default.catalog_returns Output [4]: [cr_catalog_page_sk#53, cr_return_amount#54, cr_net_loss#55, cr_returned_date_sk#56] Batched: true Location: InMemoryFileIndex [] @@ -221,7 +221,7 @@ Join condition: None Output [5]: [page_sk#47, sales_price#49, profit#50, return_amt#51, net_loss#52] Input [7]: [page_sk#47, date_sk#48, sales_price#49, profit#50, return_amt#51, net_loss#52, d_date_sk#63] -(unknown) Scan parquet spark_catalog.default.catalog_page +(32) Scan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#64, cp_catalog_page_id#65] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -267,7 +267,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#49)), sum(UnscaledValue(return_amt Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#49))#74, sum(UnscaledValue(return_amt#51))#75, sum(UnscaledValue(profit#50))#76, sum(UnscaledValue(net_loss#52))#77] Results [5]: [MakeDecimal(sum(UnscaledValue(sales_price#49))#74,17,2) AS sales#78, MakeDecimal(sum(UnscaledValue(return_amt#51))#75,17,2) AS returns#79, (MakeDecimal(sum(UnscaledValue(profit#50))#76,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#52))#77,17,2)) AS profit#80, catalog channel AS channel#81, concat(catalog_page, cp_catalog_page_id#65) AS id#82] -(unknown) Scan parquet spark_catalog.default.web_sales +(41) Scan parquet spark_catalog.default.web_sales Output [4]: [ws_web_site_sk#83, ws_ext_sales_price#84, ws_net_profit#85, ws_sold_date_sk#86] Batched: true Location: InMemoryFileIndex [] @@ -283,7 +283,7 @@ Condition : isnotnull(ws_web_site_sk#83) Input [4]: [ws_web_site_sk#83, ws_ext_sales_price#84, ws_net_profit#85, ws_sold_date_sk#86] Arguments: [wsr_web_site_sk#88, date_sk#89, sales_price#90, profit#91, return_amt#92, net_loss#93], [ws_web_site_sk#83 AS wsr_web_site_sk#88, ws_sold_date_sk#86 AS date_sk#89, ws_ext_sales_price#84 AS sales_price#90, ws_net_profit#85 AS profit#91, 0.00 AS return_amt#92, 0.00 AS net_loss#93] -(unknown) Scan parquet spark_catalog.default.web_returns +(44) Scan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#94, wr_order_number#95, wr_return_amt#96, wr_net_loss#97, wr_returned_date_sk#98] Batched: true Location: InMemoryFileIndex [] @@ -294,7 +294,7 @@ ReadSchema: struct= (0.9 * cs_item_rev# Output [3]: [item_id#11, ss_item_rev#12, cs_item_rev#24] Input [4]: [item_id#11, ss_item_rev#12, item_id#23, cs_item_rev#24] 
-(unknown) Scan parquet spark_catalog.default.web_sales +(33) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_item_sk#25, ws_ext_sales_price#26, ws_sold_date_sk#27] Batched: true Location: InMemoryFileIndex [] @@ -302,7 +302,7 @@ BroadcastExchange (60) +- CometScan parquet spark_catalog.default.date_dim (53) -(unknown) Scan parquet spark_catalog.default.date_dim +(50) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_date#41] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -316,7 +316,7 @@ Condition : isnotnull(d_date_sk#7) (52) ColumnarToRow [codegen id : 2] Input [2]: [d_date_sk#7, d_date#41] -(unknown) Scan parquet spark_catalog.default.date_dim +(53) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date#42, d_week_seq#43] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -359,7 +359,7 @@ Subquery:2 Hosting operator id = 54 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (61) -(unknown) Scan parquet spark_catalog.default.date_dim +(61) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date#46, d_week_seq#47] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt index 62b3f5868..9bfb087f7 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q59/explain.txt @@ -45,7 +45,7 @@ TakeOrderedAndProject (44) +- CometScan parquet spark_catalog.default.date_dim (34) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -60,7 +60,7 @@ Condition : isnotnull(ss_store_sk#1) (3) ColumnarToRow [codegen id : 2] Input [3]: [ss_store_sk#1, ss_sales_price#2, ss_sold_date_sk#3] -(unknown) Scan parquet spark_catalog.default.date_dim +(4) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#4, d_week_seq#5, d_day_name#6] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -106,7 +106,7 @@ Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_s Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END))#21, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END))#22, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END))#23, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END))#24, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END))#25, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))#27] Results [9]: [d_week_seq#5, ss_store_sk#1, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END))#21,17,2) AS sun_sales#28, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END))#22,17,2) AS mon_sales#29, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END))#23,17,2) AS tue_sales#30, MakeDecimal(sum(UnscaledValue(CASE WHEN 
(d_day_name#6 = Wednesday) THEN ss_sales_price#2 END))#24,17,2) AS wed_sales#31, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END))#25,17,2) AS thu_sales#32, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END))#26,17,2) AS fri_sales#33, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))#27,17,2) AS sat_sales#34] -(unknown) Scan parquet spark_catalog.default.store +(13) Scan parquet spark_catalog.default.store Output [3]: [s_store_sk#35, s_store_id#36, s_store_name#37] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -134,7 +134,7 @@ Join condition: None Output [10]: [d_week_seq#5, sun_sales#28, mon_sales#29, tue_sales#30, wed_sales#31, thu_sales#32, fri_sales#33, sat_sales#34, s_store_id#36, s_store_name#37] Input [12]: [d_week_seq#5, ss_store_sk#1, sun_sales#28, mon_sales#29, tue_sales#30, wed_sales#31, thu_sales#32, fri_sales#33, sat_sales#34, s_store_sk#35, s_store_id#36, s_store_name#37] -(unknown) Scan parquet spark_catalog.default.date_dim +(19) Scan parquet spark_catalog.default.date_dim Output [2]: [d_month_seq#38, d_week_seq#39] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -176,7 +176,7 @@ Functions [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_s Aggregate Attributes [7]: [sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END))#21, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END))#22, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END))#23, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END))#24, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END))#25, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END))#26, sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))#27] Results [9]: [d_week_seq#5, ss_store_sk#1, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Sunday ) THEN ss_sales_price#2 END))#21,17,2) AS sun_sales#28, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Monday ) THEN ss_sales_price#2 END))#22,17,2) AS mon_sales#29, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Tuesday ) THEN ss_sales_price#2 END))#23,17,2) AS tue_sales#30, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Wednesday) THEN ss_sales_price#2 END))#24,17,2) AS wed_sales#31, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Thursday ) THEN ss_sales_price#2 END))#25,17,2) AS thu_sales#32, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Friday ) THEN ss_sales_price#2 END))#26,17,2) AS fri_sales#33, MakeDecimal(sum(UnscaledValue(CASE WHEN (d_day_name#6 = Saturday ) THEN ss_sales_price#2 END))#27,17,2) AS sat_sales#34] -(unknown) Scan parquet spark_catalog.default.store +(28) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#57, s_store_id#58] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -204,7 +204,7 @@ Join condition: None Output [9]: [d_week_seq#5, sun_sales#28, mon_sales#29, tue_sales#30, wed_sales#31, thu_sales#32, fri_sales#33, sat_sales#34, s_store_id#58] Input [11]: [d_week_seq#5, ss_store_sk#1, sun_sales#28, mon_sales#29, tue_sales#30, wed_sales#31, thu_sales#32, fri_sales#33, sat_sales#34, s_store_sk#57, s_store_id#58] -(unknown) Scan parquet spark_catalog.default.date_dim +(34) Scan parquet 
spark_catalog.default.date_dim Output [2]: [d_month_seq#59, d_week_seq#60] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt index cc4720a19..608154948 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q6/explain.txt @@ -40,7 +40,7 @@ TakeOrderedAndProject (39) +- CometScan parquet spark_catalog.default.item (22) -(unknown) Scan parquet spark_catalog.default.customer_address +(1) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#1, ca_state#2] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -54,7 +54,7 @@ Condition : isnotnull(ca_address_sk#1) (3) ColumnarToRow [codegen id : 7] Input [2]: [ca_address_sk#1, ca_state#2] -(unknown) Scan parquet spark_catalog.default.customer +(4) Scan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#3, c_current_addr_sk#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -82,7 +82,7 @@ Join condition: None Output [2]: [ca_state#2, c_customer_sk#3] Input [4]: [ca_address_sk#1, ca_state#2, c_customer_sk#3, c_current_addr_sk#4] -(unknown) Scan parquet spark_catalog.default.store_sales +(10) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_item_sk#5, ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -124,7 +124,7 @@ Join condition: None Output [2]: [ca_state#2, ss_item_sk#5] Input [4]: [ca_state#2, ss_item_sk#5, ss_sold_date_sk#7, d_date_sk#9] -(unknown) Scan parquet spark_catalog.default.item +(19) Scan parquet spark_catalog.default.item Output [3]: [i_item_sk#10, i_current_price#11, i_category#12] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -138,7 +138,7 @@ Condition : ((isnotnull(i_current_price#11) AND isnotnull(i_category#12)) AND is (21) ColumnarToRow [codegen id : 6] Input [3]: [i_item_sk#10, i_current_price#11, i_category#12] -(unknown) Scan parquet spark_catalog.default.item +(22) Scan parquet spark_catalog.default.item Output [2]: [i_current_price#13, i_category#14] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -236,7 +236,7 @@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(unknown) Scan parquet spark_catalog.default.date_dim +(40) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_month_seq#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -268,7 +268,7 @@ Subquery:2 Hosting operator id = 41 Hosting Expression = Subquery scalar-subquer +- CometScan parquet spark_catalog.default.date_dim (45) -(unknown) Scan parquet spark_catalog.default.date_dim +(45) Scan parquet spark_catalog.default.date_dim Output [3]: [d_month_seq#27, d_year#28, d_moy#29] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt index 8885bc8e5..78f4b27ac 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q60/explain.txt @@ -64,7 +64,7 @@ 
TakeOrderedAndProject (63) +- ReusedExchange (53) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -92,7 +92,7 @@ Join condition: None Output [3]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3] Input [5]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4, d_date_sk#6] -(unknown) Scan parquet spark_catalog.default.customer_address +(7) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#7, ca_gmt_offset#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -124,7 +124,7 @@ Join condition: None Output [2]: [ss_item_sk#1, ss_ext_sales_price#3] Input [4]: [ss_item_sk#1, ss_addr_sk#2, ss_ext_sales_price#3, ca_address_sk#7] -(unknown) Scan parquet spark_catalog.default.item +(14) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#9, i_item_id#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -138,7 +138,7 @@ Condition : isnotnull(i_item_sk#9) (16) ColumnarToRow [codegen id : 4] Input [2]: [i_item_sk#9, i_item_id#10] -(unknown) Scan parquet spark_catalog.default.item +(17) Scan parquet spark_catalog.default.item Output [2]: [i_item_id#11, i_category#12] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -198,7 +198,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#3))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#3))#15] Results [2]: [i_item_id#10, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#3))#15,17,2) AS total_sales#16] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(29) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_bill_addr_sk#17, cs_item_sk#18, cs_ext_sales_price#19, cs_sold_date_sk#20] Batched: true Location: InMemoryFileIndex [] @@ -270,7 +270,7 @@ Functions [1]: [sum(UnscaledValue(cs_ext_sales_price#19))] Aggregate Attributes [1]: [sum(UnscaledValue(cs_ext_sales_price#19))#28] Results [2]: [i_item_id#25, MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#19))#28,17,2) AS total_sales#29] -(unknown) Scan parquet spark_catalog.default.web_sales +(44) Scan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#30, ws_bill_addr_sk#31, ws_ext_sales_price#32, ws_sold_date_sk#33] Batched: true Location: InMemoryFileIndex [] @@ -376,7 +376,7 @@ BroadcastExchange (68) +- CometScan parquet spark_catalog.default.date_dim (64) -(unknown) Scan parquet spark_catalog.default.date_dim +(64) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#6, d_year#49, d_moy#50] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt index ee9a8409a..766362167 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q61/explain.txt @@ -68,7 +68,7 @@ +- ReusedExchange (59) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_promo_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -83,7 +83,7 @@ Condition : 
(((isnotnull(ss_store_sk#3) AND isnotnull(ss_promo_sk#4)) AND isnotn (3) ColumnarToRow [codegen id : 7] Input [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_promo_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] -(unknown) Scan parquet spark_catalog.default.store +(4) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#8, s_gmt_offset#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -115,7 +115,7 @@ Join condition: None Output [5]: [ss_item_sk#1, ss_customer_sk#2, ss_promo_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6] Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_promo_sk#4, ss_ext_sales_price#5, ss_sold_date_sk#6, s_store_sk#8] -(unknown) Scan parquet spark_catalog.default.promotion +(11) Scan parquet spark_catalog.default.promotion Output [4]: [p_promo_sk#10, p_channel_dmail#11, p_channel_email#12, p_channel_tv#13] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -160,7 +160,7 @@ Join condition: None Output [3]: [ss_item_sk#1, ss_customer_sk#2, ss_ext_sales_price#5] Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_ext_sales_price#5, ss_sold_date_sk#6, d_date_sk#14] -(unknown) Scan parquet spark_catalog.default.customer +(21) Scan parquet spark_catalog.default.customer Output [2]: [c_customer_sk#15, c_current_addr_sk#16] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -188,7 +188,7 @@ Join condition: None Output [3]: [ss_item_sk#1, ss_ext_sales_price#5, c_current_addr_sk#16] Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_ext_sales_price#5, c_customer_sk#15, c_current_addr_sk#16] -(unknown) Scan parquet spark_catalog.default.customer_address +(27) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#17, ca_gmt_offset#18] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -220,7 +220,7 @@ Join condition: None Output [2]: [ss_item_sk#1, ss_ext_sales_price#5] Input [4]: [ss_item_sk#1, ss_ext_sales_price#5, c_current_addr_sk#16, ca_address_sk#17] -(unknown) Scan parquet spark_catalog.default.item +(34) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#19, i_category#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -270,7 +270,7 @@ Functions [1]: [sum(UnscaledValue(ss_ext_sales_price#5))] Aggregate Attributes [1]: [sum(UnscaledValue(ss_ext_sales_price#5))#23] Results [1]: [MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#5))#23,17,2) AS promotions#24] -(unknown) Scan parquet spark_catalog.default.store_sales +(44) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#25, ss_customer_sk#26, ss_store_sk#27, ss_ext_sales_price#28, ss_sold_date_sk#29] Batched: true Location: InMemoryFileIndex [] @@ -390,7 +390,7 @@ BroadcastExchange (72) +- CometScan parquet spark_catalog.default.date_dim (68) -(unknown) Scan parquet spark_catalog.default.date_dim +(68) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#14, d_year#42, d_moy#43] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q62/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q62/explain.txt index 82ded1c50..0607d8077 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q62/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q62/explain.txt @@ -33,7 +33,7 @@ TakeOrderedAndProject 
(32) +- CometScan parquet spark_catalog.default.date_dim (22) -(unknown) Scan parquet spark_catalog.default.web_sales +(1) Scan parquet spark_catalog.default.web_sales Output [5]: [ws_ship_date_sk#1, ws_web_site_sk#2, ws_ship_mode_sk#3, ws_warehouse_sk#4, ws_sold_date_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/web_sales] @@ -47,7 +47,7 @@ Condition : (((isnotnull(ws_warehouse_sk#4) AND isnotnull(ws_ship_mode_sk#3)) AN (3) ColumnarToRow [codegen id : 5] Input [5]: [ws_ship_date_sk#1, ws_web_site_sk#2, ws_ship_mode_sk#3, ws_warehouse_sk#4, ws_sold_date_sk#5] -(unknown) Scan parquet spark_catalog.default.warehouse +(4) Scan parquet spark_catalog.default.warehouse Output [2]: [w_warehouse_sk#6, w_warehouse_name#7] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -75,7 +75,7 @@ Join condition: None Output [5]: [ws_ship_date_sk#1, ws_web_site_sk#2, ws_ship_mode_sk#3, ws_sold_date_sk#5, w_warehouse_name#7] Input [7]: [ws_ship_date_sk#1, ws_web_site_sk#2, ws_ship_mode_sk#3, ws_warehouse_sk#4, ws_sold_date_sk#5, w_warehouse_sk#6, w_warehouse_name#7] -(unknown) Scan parquet spark_catalog.default.ship_mode +(10) Scan parquet spark_catalog.default.ship_mode Output [2]: [sm_ship_mode_sk#8, sm_type#9] Batched: true Location [not included in comparison]/{warehouse_dir}/ship_mode] @@ -103,7 +103,7 @@ Join condition: None Output [5]: [ws_ship_date_sk#1, ws_web_site_sk#2, ws_sold_date_sk#5, w_warehouse_name#7, sm_type#9] Input [7]: [ws_ship_date_sk#1, ws_web_site_sk#2, ws_ship_mode_sk#3, ws_sold_date_sk#5, w_warehouse_name#7, sm_ship_mode_sk#8, sm_type#9] -(unknown) Scan parquet spark_catalog.default.web_site +(16) Scan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#10, web_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] @@ -131,7 +131,7 @@ Join condition: None Output [5]: [ws_ship_date_sk#1, ws_sold_date_sk#5, w_warehouse_name#7, sm_type#9, web_name#11] Input [7]: [ws_ship_date_sk#1, ws_web_site_sk#2, ws_sold_date_sk#5, w_warehouse_name#7, sm_type#9, web_site_sk#10, web_name#11] -(unknown) Scan parquet spark_catalog.default.date_dim +(22) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#12, d_month_seq#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q63/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q63/explain.txt index 458a35b38..990bc3195 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q63/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q63/explain.txt @@ -29,7 +29,7 @@ TakeOrderedAndProject (28) +- CometScan parquet spark_catalog.default.store (14) -(unknown) Scan parquet spark_catalog.default.item +(1) Scan parquet spark_catalog.default.item Output [5]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4, i_manager_id#5] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -47,7 +47,7 @@ Arguments: [i_item_sk#1, i_manager_id#5], [i_item_sk#1, i_manager_id#5] (4) ColumnarToRow [codegen id : 4] Input [2]: [i_item_sk#1, i_manager_id#5] -(unknown) Scan parquet spark_catalog.default.store_sales +(5) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#10, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -89,7 +89,7 @@ Join condition: None Output [4]: [i_manager_id#5, 
ss_store_sk#11, ss_sales_price#12, d_moy#16] Input [6]: [i_manager_id#5, ss_store_sk#11, ss_sales_price#12, ss_sold_date_sk#13, d_date_sk#15, d_moy#16] -(unknown) Scan parquet spark_catalog.default.store +(14) Scan parquet spark_catalog.default.store Output [1]: [s_store_sk#17] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -169,7 +169,7 @@ BroadcastExchange (33) +- CometScan parquet spark_catalog.default.date_dim (29) -(unknown) Scan parquet spark_catalog.default.date_dim +(29) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#15, d_month_seq#24, d_moy#16] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q64/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q64/explain.txt index a6f957162..95cb8ab51 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q64/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q64/explain.txt @@ -182,7 +182,7 @@ +- ReusedExchange (171) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Batched: true Location: InMemoryFileIndex [] @@ -198,7 +198,7 @@ Condition : (((((((isnotnull(ss_item_sk#1) AND isnotnull(ss_ticket_number#8)) AN Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Arguments: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_ticket_number#8, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] -(unknown) Scan parquet spark_catalog.default.store_returns +(4) Scan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#14, sr_ticket_number#15, sr_returned_date_sk#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -233,7 +233,7 @@ Arguments: hashpartitioning(ss_item_sk#1, 5), ENSURE_REQUIREMENTS, [plan_id=1] Input [11]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12] Arguments: [ss_item_sk#1 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.catalog_sales +(12) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19, cs_sold_date_sk#20] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_sales] @@ -259,7 +259,7 @@ Arguments: hashpartitioning(cs_item_sk#17, cs_order_number#18, 5), ENSURE_REQUIR Input [3]: [cs_item_sk#17, cs_order_number#18, cs_ext_list_price#19] Arguments: [cs_item_sk#17 ASC NULLS FIRST, cs_order_number#18 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.catalog_returns +(18) Scan parquet spark_catalog.default.catalog_returns Output [6]: [cr_item_sk#21, cr_order_number#22, cr_refunded_cash#23, cr_reversed_charge#24, cr_store_credit#25, cr_returned_date_sk#26] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -348,7 +348,7 @@ 
Join condition: None Output [11]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38] Input [13]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, ss_sold_date_sk#12, d_date_sk#37, d_year#38] -(unknown) Scan parquet spark_catalog.default.store +(37) Scan parquet spark_catalog.default.store Output [3]: [s_store_sk#39, s_store_name#40, s_zip#41] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -376,7 +376,7 @@ Join condition: None Output [12]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41] Input [14]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_store_sk#6, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_sk#39, s_store_name#40, s_zip#41] -(unknown) Scan parquet spark_catalog.default.customer +(43) Scan parquet spark_catalog.default.customer Output [6]: [c_customer_sk#42, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, c_first_sales_date_sk#47] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -404,7 +404,7 @@ Join condition: None Output [16]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, c_first_sales_date_sk#47] Input [18]: [ss_item_sk#1, ss_customer_sk#2, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_customer_sk#42, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, c_first_sales_date_sk#47] -(unknown) Scan parquet spark_catalog.default.date_dim +(49) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#48, d_year#49] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -445,7 +445,7 @@ Join condition: None Output [16]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51] Input [18]: [ss_item_sk#1, ss_cdemo_sk#3, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, c_first_shipto_date_sk#46, d_year#49, d_date_sk#50, d_year#51] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(58) Scan parquet spark_catalog.default.customer_demographics Output [2]: [cd_demo_sk#52, cd_marital_status#53] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -486,7 +486,7 @@ Join condition: NOT (cd_marital_status#53 = cd_marital_status#55) Output [14]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, 
c_current_addr_sk#45, d_year#49, d_year#51] Input [18]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_cdemo_sk#43, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51, cd_marital_status#53, cd_demo_sk#54, cd_marital_status#55] -(unknown) Scan parquet spark_catalog.default.promotion +(67) Scan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#56] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -514,7 +514,7 @@ Join condition: None Output [13]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51] Input [15]: [ss_item_sk#1, ss_hdemo_sk#4, ss_addr_sk#5, ss_promo_sk#7, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51, p_promo_sk#56] -(unknown) Scan parquet spark_catalog.default.household_demographics +(73) Scan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#57, hd_income_band_sk#58] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -555,7 +555,7 @@ Join condition: None Output [13]: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_addr_sk#45, d_year#49, d_year#51, hd_income_band_sk#58, hd_income_band_sk#60] Input [15]: [ss_item_sk#1, ss_addr_sk#5, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_hdemo_sk#44, c_current_addr_sk#45, d_year#49, d_year#51, hd_income_band_sk#58, hd_demo_sk#59, hd_income_band_sk#60] -(unknown) Scan parquet spark_catalog.default.customer_address +(82) Scan parquet spark_catalog.default.customer_address Output [5]: [ca_address_sk#61, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -596,7 +596,7 @@ Join condition: None Output [19]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, d_year#49, d_year#51, hd_income_band_sk#58, hd_income_band_sk#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70] Input [21]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, c_current_addr_sk#45, d_year#49, d_year#51, hd_income_band_sk#58, hd_income_band_sk#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_address_sk#66, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70] -(unknown) Scan parquet spark_catalog.default.income_band +(91) Scan parquet spark_catalog.default.income_band Output [1]: [ib_income_band_sk#71] Batched: true Location [not included in comparison]/{warehouse_dir}/income_band] @@ -637,7 +637,7 @@ Join condition: None Output [17]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, s_zip#41, d_year#49, d_year#51, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70] Input [19]: [ss_item_sk#1, ss_wholesale_cost#9, ss_list_price#10, ss_coupon_amt#11, d_year#38, s_store_name#40, 
s_zip#41, d_year#49, d_year#51, hd_income_band_sk#60, ca_street_number#62, ca_street_name#63, ca_city#64, ca_zip#65, ca_street_number#67, ca_street_name#68, ca_city#69, ca_zip#70, ib_income_band_sk#72] -(unknown) Scan parquet spark_catalog.default.item +(100) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#73, i_current_price#74, i_color#75, i_product_name#76] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -691,7 +691,7 @@ Arguments: hashpartitioning(item_sk#90, store_name#91, store_zip#92, 5), ENSURE_ Input [17]: [product_name#89, item_sk#90, store_name#91, store_zip#92, b_street_number#93, b_streen_name#94, b_city#95, b_zip#96, c_street_number#97, c_street_name#98, c_city#99, c_zip#100, syear#101, cnt#102, s1#103, s2#104, s3#105] Arguments: [item_sk#90 ASC NULLS FIRST, store_name#91 ASC NULLS FIRST, store_zip#92 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.store_sales +(111) Scan parquet spark_catalog.default.store_sales Output [12]: [ss_item_sk#106, ss_customer_sk#107, ss_cdemo_sk#108, ss_hdemo_sk#109, ss_addr_sk#110, ss_store_sk#111, ss_promo_sk#112, ss_ticket_number#113, ss_wholesale_cost#114, ss_list_price#115, ss_coupon_amt#116, ss_sold_date_sk#117] Batched: true Location: InMemoryFileIndex [] @@ -707,7 +707,7 @@ Condition : (((((((isnotnull(ss_item_sk#106) AND isnotnull(ss_ticket_number#113) Input [12]: [ss_item_sk#106, ss_customer_sk#107, ss_cdemo_sk#108, ss_hdemo_sk#109, ss_addr_sk#110, ss_store_sk#111, ss_promo_sk#112, ss_ticket_number#113, ss_wholesale_cost#114, ss_list_price#115, ss_coupon_amt#116, ss_sold_date_sk#117] Arguments: [ss_item_sk#106, ss_customer_sk#107, ss_cdemo_sk#108, ss_hdemo_sk#109, ss_addr_sk#110, ss_store_sk#111, ss_promo_sk#112, ss_ticket_number#113, ss_wholesale_cost#114, ss_list_price#115, ss_coupon_amt#116, ss_sold_date_sk#117] -(unknown) Scan parquet spark_catalog.default.store_returns +(114) Scan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#119, sr_ticket_number#120, sr_returned_date_sk#121] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -1018,7 +1018,7 @@ BroadcastExchange (185) +- CometScan parquet spark_catalog.default.date_dim (182) -(unknown) Scan parquet spark_catalog.default.date_dim +(182) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#37, d_year#38] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -1043,7 +1043,7 @@ BroadcastExchange (189) +- CometScan parquet spark_catalog.default.date_dim (186) -(unknown) Scan parquet spark_catalog.default.date_dim +(186) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#130, d_year#131] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q65/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q65/explain.txt index 1a06f27fb..eda84bb52 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q65/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q65/explain.txt @@ -40,7 +40,7 @@ TakeOrderedAndProject (39) +- ReusedExchange (26) -(unknown) Scan parquet spark_catalog.default.store +(1) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#1, s_store_name#2] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -54,7 +54,7 @@ Condition : isnotnull(s_store_sk#1) (3) ColumnarToRow [codegen id : 
9] Input [2]: [s_store_sk#1, s_store_name#2] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#3, ss_store_sk#4, ss_sales_price#5, ss_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -118,7 +118,7 @@ Join condition: None Output [4]: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#12] Input [5]: [s_store_sk#1, s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#12] -(unknown) Scan parquet spark_catalog.default.item +(17) Scan parquet spark_catalog.default.item Output [5]: [i_item_sk#13, i_item_desc#14, i_current_price#15, i_wholesale_cost#16, i_brand#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -146,7 +146,7 @@ Join condition: None Output [7]: [s_store_name#2, ss_store_sk#4, revenue#12, i_item_desc#14, i_current_price#15, i_wholesale_cost#16, i_brand#17] Input [9]: [s_store_name#2, ss_store_sk#4, ss_item_sk#3, revenue#12, i_item_sk#13, i_item_desc#14, i_current_price#15, i_wholesale_cost#16, i_brand#17] -(unknown) Scan parquet spark_catalog.default.store_sales +(23) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#18, ss_store_sk#19, ss_sales_price#20, ss_sold_date_sk#21] Batched: true Location: InMemoryFileIndex [] @@ -242,7 +242,7 @@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(unknown) Scan parquet spark_catalog.default.date_dim +(40) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#8, d_month_seq#34] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt index 41e783d13..a26c457aa 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q66/explain.txt @@ -53,7 +53,7 @@ TakeOrderedAndProject (52) +- ReusedExchange (42) -(unknown) Scan parquet spark_catalog.default.web_sales +(1) Scan parquet spark_catalog.default.web_sales Output [7]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_warehouse_sk#3, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, ws_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -68,7 +68,7 @@ Condition : ((isnotnull(ws_warehouse_sk#3) AND isnotnull(ws_sold_time_sk#1)) AND (3) ColumnarToRow [codegen id : 5] Input [7]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_warehouse_sk#3, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, ws_sold_date_sk#7] -(unknown) Scan parquet spark_catalog.default.warehouse +(4) Scan parquet spark_catalog.default.warehouse Output [7]: [w_warehouse_sk#9, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#14, w_country#15] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -109,7 +109,7 @@ Join condition: None Output [13]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#14, w_country#15, d_year#17, d_moy#18] Input [15]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, ws_sold_date_sk#7, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#14, w_country#15, d_date_sk#16, d_year#17, d_moy#18] -(unknown) Scan parquet spark_catalog.default.time_dim +(13) Scan parquet 
spark_catalog.default.time_dim Output [2]: [t_time_sk#19, t_time#20] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -141,7 +141,7 @@ Join condition: None Output [12]: [ws_ship_mode_sk#2, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#14, w_country#15, d_year#17, d_moy#18] Input [14]: [ws_sold_time_sk#1, ws_ship_mode_sk#2, ws_quantity#4, ws_ext_sales_price#5, ws_net_paid#6, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#14, w_country#15, d_year#17, d_moy#18, t_time_sk#19] -(unknown) Scan parquet spark_catalog.default.ship_mode +(20) Scan parquet spark_catalog.default.ship_mode Output [2]: [sm_ship_mode_sk#21, sm_carrier#22] Batched: true Location [not included in comparison]/{warehouse_dir}/ship_mode] @@ -191,7 +191,7 @@ Functions [24]: [sum(CASE WHEN (d_moy#18 = 1) THEN (ws_ext_sales_price#5 * cast( Aggregate Attributes [24]: [sum(CASE WHEN (d_moy#18 = 1) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#119, sum(CASE WHEN (d_moy#18 = 2) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#120, sum(CASE WHEN (d_moy#18 = 3) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#121, sum(CASE WHEN (d_moy#18 = 4) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#122, sum(CASE WHEN (d_moy#18 = 5) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#123, sum(CASE WHEN (d_moy#18 = 6) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#124, sum(CASE WHEN (d_moy#18 = 7) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#125, sum(CASE WHEN (d_moy#18 = 8) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#126, sum(CASE WHEN (d_moy#18 = 9) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#127, sum(CASE WHEN (d_moy#18 = 10) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#128, sum(CASE WHEN (d_moy#18 = 11) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#129, sum(CASE WHEN (d_moy#18 = 12) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#130, sum(CASE WHEN (d_moy#18 = 1) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#131, sum(CASE WHEN (d_moy#18 = 2) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#132, sum(CASE WHEN (d_moy#18 = 3) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#133, sum(CASE WHEN (d_moy#18 = 4) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#134, sum(CASE WHEN (d_moy#18 = 5) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#135, sum(CASE WHEN (d_moy#18 = 6) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#136, sum(CASE WHEN (d_moy#18 = 7) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#137, sum(CASE WHEN (d_moy#18 = 8) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#138, sum(CASE WHEN (d_moy#18 = 9) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#139, sum(CASE WHEN (d_moy#18 = 10) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#140, sum(CASE WHEN (d_moy#18 = 11) THEN (ws_net_paid#6 * cast(ws_quantity#4 as 
decimal(10,0))) ELSE 0.00 END)#141, sum(CASE WHEN (d_moy#18 = 12) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#142] Results [32]: [w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#14, w_country#15, DHL,BARIAN AS ship_carriers#143, d_year#17 AS year#144, sum(CASE WHEN (d_moy#18 = 1) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#119 AS jan_sales#145, sum(CASE WHEN (d_moy#18 = 2) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#120 AS feb_sales#146, sum(CASE WHEN (d_moy#18 = 3) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#121 AS mar_sales#147, sum(CASE WHEN (d_moy#18 = 4) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#122 AS apr_sales#148, sum(CASE WHEN (d_moy#18 = 5) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#123 AS may_sales#149, sum(CASE WHEN (d_moy#18 = 6) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#124 AS jun_sales#150, sum(CASE WHEN (d_moy#18 = 7) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#125 AS jul_sales#151, sum(CASE WHEN (d_moy#18 = 8) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#126 AS aug_sales#152, sum(CASE WHEN (d_moy#18 = 9) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#127 AS sep_sales#153, sum(CASE WHEN (d_moy#18 = 10) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#128 AS oct_sales#154, sum(CASE WHEN (d_moy#18 = 11) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#129 AS nov_sales#155, sum(CASE WHEN (d_moy#18 = 12) THEN (ws_ext_sales_price#5 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#130 AS dec_sales#156, sum(CASE WHEN (d_moy#18 = 1) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#131 AS jan_net#157, sum(CASE WHEN (d_moy#18 = 2) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#132 AS feb_net#158, sum(CASE WHEN (d_moy#18 = 3) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#133 AS mar_net#159, sum(CASE WHEN (d_moy#18 = 4) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#134 AS apr_net#160, sum(CASE WHEN (d_moy#18 = 5) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#135 AS may_net#161, sum(CASE WHEN (d_moy#18 = 6) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#136 AS jun_net#162, sum(CASE WHEN (d_moy#18 = 7) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#137 AS jul_net#163, sum(CASE WHEN (d_moy#18 = 8) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#138 AS aug_net#164, sum(CASE WHEN (d_moy#18 = 9) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#139 AS sep_net#165, sum(CASE WHEN (d_moy#18 = 10) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#140 AS oct_net#166, sum(CASE WHEN (d_moy#18 = 11) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#141 AS nov_net#167, sum(CASE WHEN (d_moy#18 = 12) THEN (ws_net_paid#6 * cast(ws_quantity#4 as decimal(10,0))) ELSE 0.00 END)#142 AS dec_net#168] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(30) Scan parquet spark_catalog.default.catalog_sales Output [7]: [cs_sold_time_sk#169, 
cs_ship_mode_sk#170, cs_warehouse_sk#171, cs_quantity#172, cs_sales_price#173, cs_net_paid_inc_tax#174, cs_sold_date_sk#175] Batched: true Location: InMemoryFileIndex [] @@ -309,7 +309,7 @@ BroadcastExchange (56) +- CometScan parquet spark_catalog.default.date_dim (53) -(unknown) Scan parquet spark_catalog.default.date_dim +(53) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#16, d_year#17, d_moy#18] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q67/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q67/explain.txt index 5dabc82d2..53e9b00bf 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q67/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q67/explain.txt @@ -28,7 +28,7 @@ TakeOrderedAndProject (27) +- CometScan parquet spark_catalog.default.item (13) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_quantity#3, ss_sales_price#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -56,7 +56,7 @@ Join condition: None Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_quantity#3, ss_sales_price#4, d_year#8, d_moy#9, d_qoy#10] Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_quantity#3, ss_sales_price#4, ss_sold_date_sk#5, d_date_sk#7, d_year#8, d_moy#9, d_qoy#10] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#11, s_store_id#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -84,7 +84,7 @@ Join condition: None Output [7]: [ss_item_sk#1, ss_quantity#3, ss_sales_price#4, d_year#8, d_moy#9, d_qoy#10, s_store_id#12] Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_quantity#3, ss_sales_price#4, d_year#8, d_moy#9, d_qoy#10, s_store_sk#11, s_store_id#12] -(unknown) Scan parquet spark_catalog.default.item +(13) Scan parquet spark_catalog.default.item Output [5]: [i_item_sk#13, i_brand#14, i_class#15, i_category#16, i_product_name#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -164,7 +164,7 @@ BroadcastExchange (32) +- CometScan parquet spark_catalog.default.date_dim (28) -(unknown) Scan parquet spark_catalog.default.date_dim +(28) Scan parquet spark_catalog.default.date_dim Output [5]: [d_date_sk#7, d_month_seq#34, d_year#8, d_moy#9, d_qoy#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q68/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q68/explain.txt index d87d02e87..734b6c11c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q68/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q68/explain.txt @@ -40,7 +40,7 @@ TakeOrderedAndProject (39) +- ReusedExchange (36) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [9]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_ext_sales_price#6, ss_ext_list_price#7, ss_ext_tax#8, ss_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -68,7 +68,7 @@ Join condition: None Output [8]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, 
ss_ext_sales_price#6, ss_ext_list_price#7, ss_ext_tax#8] Input [10]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_ext_sales_price#6, ss_ext_list_price#7, ss_ext_tax#8, ss_sold_date_sk#9, d_date_sk#11] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#12, s_city#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -100,7 +100,7 @@ Join condition: None Output [7]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_ticket_number#5, ss_ext_sales_price#6, ss_ext_list_price#7, ss_ext_tax#8] Input [9]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_ext_sales_price#6, ss_ext_list_price#7, ss_ext_tax#8, s_store_sk#12] -(unknown) Scan parquet spark_catalog.default.household_demographics +(14) Scan parquet spark_catalog.default.household_demographics Output [3]: [hd_demo_sk#14, hd_dep_count#15, hd_vehicle_count#16] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -132,7 +132,7 @@ Join condition: None Output [6]: [ss_customer_sk#1, ss_addr_sk#3, ss_ticket_number#5, ss_ext_sales_price#6, ss_ext_list_price#7, ss_ext_tax#8] Input [8]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_ticket_number#5, ss_ext_sales_price#6, ss_ext_list_price#7, ss_ext_tax#8, hd_demo_sk#14] -(unknown) Scan parquet spark_catalog.default.customer_address +(21) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#17, ca_city#18] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -178,7 +178,7 @@ Functions [3]: [sum(UnscaledValue(ss_ext_sales_price#6)), sum(UnscaledValue(ss_e Aggregate Attributes [3]: [sum(UnscaledValue(ss_ext_sales_price#6))#25, sum(UnscaledValue(ss_ext_list_price#7))#26, sum(UnscaledValue(ss_ext_tax#8))#27] Results [6]: [ss_ticket_number#5, ss_customer_sk#1, ca_city#18 AS bought_city#28, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#6))#25,17,2) AS extended_price#29, MakeDecimal(sum(UnscaledValue(ss_ext_list_price#7))#26,17,2) AS list_price#30, MakeDecimal(sum(UnscaledValue(ss_ext_tax#8))#27,17,2) AS extended_tax#31] -(unknown) Scan parquet spark_catalog.default.customer +(30) Scan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#32, c_current_addr_sk#33, c_first_name#34, c_last_name#35] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -233,7 +233,7 @@ BroadcastExchange (44) +- CometScan parquet spark_catalog.default.date_dim (40) -(unknown) Scan parquet spark_catalog.default.date_dim +(40) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#11, d_year#38, d_dom#39] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q69/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q69/explain.txt index 9a3262723..d56b01d73 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q69/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q69/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- CometScan parquet spark_catalog.default.customer_demographics (33) -(unknown) Scan parquet spark_catalog.default.customer +(1) Scan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Batched: true Location [not included in 
comparison]/{warehouse_dir}/customer] @@ -57,7 +57,7 @@ Condition : (isnotnull(c_current_addr_sk#3) AND isnotnull(c_current_cdemo_sk#2)) (3) ColumnarToRow [codegen id : 9] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -90,7 +90,7 @@ Right keys [1]: [ss_customer_sk#4] Join type: LeftSemi Join condition: None -(unknown) Scan parquet spark_catalog.default.web_sales +(11) Scan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#8, ws_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -123,7 +123,7 @@ Right keys [1]: [ws_bill_customer_sk#8] Join type: LeftAnti Join condition: None -(unknown) Scan parquet spark_catalog.default.catalog_sales +(18) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#12, cs_sold_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -160,7 +160,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#2, c_current_addr_sk#3] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(unknown) Scan parquet spark_catalog.default.customer_address +(26) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#16, ca_state#17] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -192,7 +192,7 @@ Join condition: None Output [1]: [c_current_cdemo_sk#2] Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#16] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(33) Scan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#18, cd_gender#19, cd_marital_status#20, cd_education_status#21, cd_purchase_estimate#22, cd_credit_rating#23] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -252,7 +252,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(unknown) Scan parquet spark_catalog.default.date_dim +(43) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#30, d_moy#31] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt index 18ff7c459..790a917e3 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q7/explain.txt @@ -31,7 +31,7 @@ TakeOrderedAndProject (30) +- CometScan parquet spark_catalog.default.promotion (20) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -46,7 +46,7 @@ Condition : ((isnotnull(ss_cdemo_sk#2) AND isnotnull(ss_item_sk#1)) AND isnotnul (3) ColumnarToRow [codegen id : 5] Input [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(4) Scan parquet spark_catalog.default.customer_demographics Output [4]: [cd_demo_sk#10, 
cd_gender#11, cd_marital_status#12, cd_education_status#13] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -91,7 +91,7 @@ Join condition: None Output [6]: [ss_item_sk#1, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7] Input [8]: [ss_item_sk#1, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8, d_date_sk#14] -(unknown) Scan parquet spark_catalog.default.item +(14) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#15, i_item_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -119,7 +119,7 @@ Join condition: None Output [6]: [ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, i_item_id#16] Input [8]: [ss_item_sk#1, ss_promo_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, i_item_sk#15, i_item_id#16] -(unknown) Scan parquet spark_catalog.default.promotion +(20) Scan parquet spark_catalog.default.promotion Output [3]: [p_promo_sk#17, p_channel_email#18, p_channel_event#19] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -183,7 +183,7 @@ BroadcastExchange (35) +- CometScan parquet spark_catalog.default.date_dim (31) -(unknown) Scan parquet spark_catalog.default.date_dim +(31) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_year#44] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q70/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q70/explain.txt index 32499fad8..82deb80a4 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q70/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q70/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- ReusedExchange (19) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -71,7 +71,7 @@ Join condition: None Output [2]: [ss_store_sk#1, ss_net_profit#2] Input [4]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3, d_date_sk#5] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [3]: [s_store_sk#6, s_county#7, s_state#8] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -85,7 +85,7 @@ Condition : isnotnull(s_store_sk#6) (9) ColumnarToRow [codegen id : 7] Input [3]: [s_store_sk#6, s_county#7, s_state#8] -(unknown) Scan parquet spark_catalog.default.store_sales +(10) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#9, ss_net_profit#10, ss_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -100,7 +100,7 @@ Condition : isnotnull(ss_store_sk#9) (12) ColumnarToRow [codegen id : 4] Input [3]: [ss_store_sk#9, ss_net_profit#10, ss_sold_date_sk#11] -(unknown) Scan parquet spark_catalog.default.store +(13) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#13, s_state#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -251,7 +251,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(unknown) Scan parquet spark_catalog.default.date_dim +(43) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#33] 
Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt index 39bedd1f2..624103b66 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q71/explain.txt @@ -39,7 +39,7 @@ +- CometScan parquet spark_catalog.default.time_dim (27) -(unknown) Scan parquet spark_catalog.default.item +(1) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#1, i_brand_id#2, i_brand#3, i_manager_id#4] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -61,7 +61,7 @@ Input [3]: [i_item_sk#1, i_brand_id#2, i_brand#3] Input [3]: [i_item_sk#1, i_brand_id#2, i_brand#3] Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)),false), [plan_id=1] -(unknown) Scan parquet spark_catalog.default.web_sales +(6) Scan parquet spark_catalog.default.web_sales Output [4]: [ws_sold_time_sk#5, ws_item_sk#6, ws_ext_sales_price#7, ws_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -89,7 +89,7 @@ Join condition: None Output [3]: [ws_ext_sales_price#7 AS ext_price#11, ws_item_sk#6 AS sold_item_sk#12, ws_sold_time_sk#5 AS time_sk#13] Input [5]: [ws_sold_time_sk#5, ws_item_sk#6, ws_ext_sales_price#7, ws_sold_date_sk#8, d_date_sk#10] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(12) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_sold_time_sk#14, cs_item_sk#15, cs_ext_sales_price#16, cs_sold_date_sk#17] Batched: true Location: InMemoryFileIndex [] @@ -117,7 +117,7 @@ Join condition: None Output [3]: [cs_ext_sales_price#16 AS ext_price#20, cs_item_sk#15 AS sold_item_sk#21, cs_sold_time_sk#14 AS time_sk#22] Input [5]: [cs_sold_time_sk#14, cs_item_sk#15, cs_ext_sales_price#16, cs_sold_date_sk#17, d_date_sk#19] -(unknown) Scan parquet spark_catalog.default.store_sales +(18) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_sold_time_sk#23, ss_item_sk#24, ss_ext_sales_price#25, ss_sold_date_sk#26] Batched: true Location: InMemoryFileIndex [] @@ -157,7 +157,7 @@ Join condition: None Output [4]: [i_brand_id#2, i_brand#3, ext_price#11, time_sk#13] Input [6]: [i_item_sk#1, i_brand_id#2, i_brand#3, ext_price#11, sold_item_sk#12, time_sk#13] -(unknown) Scan parquet spark_catalog.default.time_dim +(27) Scan parquet spark_catalog.default.time_dim Output [4]: [t_time_sk#32, t_hour#33, t_minute#34, t_meal_time#35] Batched: true Location [not included in comparison]/{warehouse_dir}/time_dim] @@ -225,7 +225,7 @@ BroadcastExchange (43) +- CometScan parquet spark_catalog.default.date_dim (39) -(unknown) Scan parquet spark_catalog.default.date_dim +(39) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#42, d_moy#43] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt index e55a0ca10..4e5d9e9f6 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q72/explain.txt @@ -71,7 +71,7 @@ TakeOrderedAndProject (70) +- CometScan parquet spark_catalog.default.catalog_returns (59) -(unknown) Scan 
parquet spark_catalog.default.catalog_sales +(1) Scan parquet spark_catalog.default.catalog_sales Output [8]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, cs_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -86,7 +86,7 @@ Condition : ((((isnotnull(cs_quantity#7) AND isnotnull(cs_item_sk#4)) AND isnotn (3) ColumnarToRow [codegen id : 10] Input [8]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, cs_sold_date_sk#8] -(unknown) Scan parquet spark_catalog.default.inventory +(4) Scan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#10, inv_warehouse_sk#11, inv_quantity_on_hand#12, inv_date_sk#13] Batched: true Location: InMemoryFileIndex [] @@ -115,7 +115,7 @@ Join condition: (inv_quantity_on_hand#12 < cs_quantity#7) Output [9]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_warehouse_sk#11, inv_date_sk#13] Input [12]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_quantity#7, cs_sold_date_sk#8, inv_item_sk#10, inv_warehouse_sk#11, inv_quantity_on_hand#12, inv_date_sk#13] -(unknown) Scan parquet spark_catalog.default.warehouse +(10) Scan parquet spark_catalog.default.warehouse Output [2]: [w_warehouse_sk#14, w_warehouse_name#15] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -143,7 +143,7 @@ Join condition: None Output [9]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_date_sk#13, w_warehouse_name#15] Input [11]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_warehouse_sk#11, inv_date_sk#13, w_warehouse_sk#14, w_warehouse_name#15] -(unknown) Scan parquet spark_catalog.default.item +(16) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#16, i_item_desc#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -171,7 +171,7 @@ Join condition: None Output [10]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_date_sk#13, w_warehouse_name#15, i_item_desc#17] Input [11]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_date_sk#13, w_warehouse_name#15, i_item_sk#16, i_item_desc#17] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(22) Scan parquet spark_catalog.default.customer_demographics Output [2]: [cd_demo_sk#18, cd_marital_status#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -203,7 +203,7 @@ Join condition: None Output [9]: [cs_ship_date_sk#1, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_date_sk#13, w_warehouse_name#15, i_item_desc#17] Input [11]: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_date_sk#13, w_warehouse_name#15, i_item_desc#17, cd_demo_sk#18] -(unknown) Scan parquet spark_catalog.default.household_demographics +(29) Scan parquet spark_catalog.default.household_demographics Output [2]: [hd_demo_sk#20, hd_buy_potential#21] Batched: true Location [not included in 
comparison]/{warehouse_dir}/household_demographics] @@ -248,7 +248,7 @@ Join condition: None Output [9]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, inv_date_sk#13, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24] Input [11]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, cs_sold_date_sk#8, inv_date_sk#13, w_warehouse_name#15, i_item_desc#17, d_date_sk#22, d_date#23, d_week_seq#24] -(unknown) Scan parquet spark_catalog.default.date_dim +(39) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#25, d_week_seq#26] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -276,7 +276,7 @@ Join condition: None Output [8]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24] Input [11]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, inv_date_sk#13, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#25, d_week_seq#26] -(unknown) Scan parquet spark_catalog.default.date_dim +(45) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#27, d_date#28] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -304,7 +304,7 @@ Join condition: (d_date#28 > date_add(d_date#23, 5)) Output [6]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#27, d_date#28] -(unknown) Scan parquet spark_catalog.default.promotion +(51) Scan parquet spark_catalog.default.promotion Output [1]: [p_promo_sk#29] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -340,7 +340,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24] Arguments: [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.catalog_returns +(59) Scan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#30, cr_order_number#31, cr_returned_date_sk#32] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -408,7 +408,7 @@ BroadcastExchange (75) +- CometScan parquet spark_catalog.default.date_dim (71) -(unknown) Scan parquet spark_catalog.default.date_dim +(71) Scan parquet spark_catalog.default.date_dim Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#39] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt index dc64c3386..ef2c38aea 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q73/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -61,7 +61,7 @@ Join condition: None Output [4]: [ss_customer_sk#1, ss_hdemo_sk#2, 
ss_store_sk#3, ss_ticket_number#4] Input [6]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5, d_date_sk#7] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#8, s_county#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -93,7 +93,7 @@ Join condition: None Output [3]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_ticket_number#4] Input [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, s_store_sk#8] -(unknown) Scan parquet spark_catalog.default.household_demographics +(14) Scan parquet spark_catalog.default.household_demographics Output [4]: [hd_demo_sk#10, hd_buy_potential#11, hd_dep_count#12, hd_vehicle_count#13] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -147,7 +147,7 @@ Results [3]: [ss_ticket_number#4, ss_customer_sk#1, count(1)#16 AS cnt#17] Input [3]: [ss_ticket_number#4, ss_customer_sk#1, cnt#17] Condition : ((cnt#17 >= 1) AND (cnt#17 <= 5)) -(unknown) Scan parquet spark_catalog.default.customer +(25) Scan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -193,7 +193,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(unknown) Scan parquet spark_catalog.default.date_dim +(33) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#23, d_dom#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt index 85413ac1c..dad94eb2c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q74/explain.txt @@ -72,7 +72,7 @@ TakeOrderedAndProject (71) +- ReusedExchange (62) -(unknown) Scan parquet spark_catalog.default.customer +(1) Scan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -86,7 +86,7 @@ Condition : (isnotnull(c_customer_sk#1) AND isnotnull(c_customer_id#2)) (3) ColumnarToRow [codegen id : 3] Input [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_customer_sk#5, ss_net_paid#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -150,7 +150,7 @@ Results [2]: [c_customer_id#2 AS customer_id#14, MakeDecimal(sum(UnscaledValue(s Input [2]: [customer_id#14, year_total#15] Condition : (isnotnull(year_total#15) AND (year_total#15 > 0.00)) -(unknown) Scan parquet spark_catalog.default.customer +(17) Scan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#16, c_customer_id#17, c_first_name#18, c_last_name#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -164,7 +164,7 @@ Condition : (isnotnull(c_customer_sk#16) AND isnotnull(c_customer_id#17)) (19) ColumnarToRow [codegen id : 6] Input [4]: [c_customer_sk#16, c_customer_id#17, c_first_name#18, c_last_name#19] -(unknown) Scan parquet 
spark_catalog.default.store_sales +(20) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_customer_sk#20, ss_net_paid#21, ss_sold_date_sk#22] Batched: true Location: InMemoryFileIndex [] @@ -234,7 +234,7 @@ Right keys [1]: [customer_id#28] Join type: Inner Join condition: None -(unknown) Scan parquet spark_catalog.default.customer +(34) Scan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#32, c_customer_id#33, c_first_name#34, c_last_name#35] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -248,7 +248,7 @@ Condition : (isnotnull(c_customer_sk#32) AND isnotnull(c_customer_id#33)) (36) ColumnarToRow [codegen id : 10] Input [4]: [c_customer_sk#32, c_customer_id#33, c_first_name#34, c_last_name#35] -(unknown) Scan parquet spark_catalog.default.web_sales +(37) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_customer_sk#36, ws_net_paid#37, ws_sold_date_sk#38] Batched: true Location: InMemoryFileIndex [] @@ -326,7 +326,7 @@ Join condition: None Output [7]: [customer_id#14, year_total#15, customer_id#28, customer_first_name#29, customer_last_name#30, year_total#31, year_total#46] Input [8]: [customer_id#14, year_total#15, customer_id#28, customer_first_name#29, customer_last_name#30, year_total#31, customer_id#45, year_total#46] -(unknown) Scan parquet spark_catalog.default.customer +(53) Scan parquet spark_catalog.default.customer Output [4]: [c_customer_sk#47, c_customer_id#48, c_first_name#49, c_last_name#50] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -340,7 +340,7 @@ Condition : (isnotnull(c_customer_sk#47) AND isnotnull(c_customer_id#48)) (55) ColumnarToRow [codegen id : 14] Input [4]: [c_customer_sk#47, c_customer_id#48, c_first_name#49, c_last_name#50] -(unknown) Scan parquet spark_catalog.default.web_sales +(56) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_bill_customer_sk#51, ws_net_paid#52, ws_sold_date_sk#53] Batched: true Location: InMemoryFileIndex [] @@ -427,7 +427,7 @@ BroadcastExchange (75) +- CometScan parquet spark_catalog.default.date_dim (72) -(unknown) Scan parquet spark_catalog.default.date_dim +(72) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#9, d_year#10] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -452,7 +452,7 @@ BroadcastExchange (79) +- CometScan parquet spark_catalog.default.date_dim (76) -(unknown) Scan parquet spark_catalog.default.date_dim +(76) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#24, d_year#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q75/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q75/explain.txt index 0ff759f36..5b9b2c228 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q75/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q75/explain.txt @@ -130,7 +130,7 @@ TakeOrderedAndProject (129) +- ReusedExchange (113) -(unknown) Scan parquet spark_catalog.default.catalog_sales +(1) Scan parquet spark_catalog.default.catalog_sales Output [5]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, cs_ext_sales_price#4, cs_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -145,7 +145,7 @@ Condition : isnotnull(cs_item_sk#1) (3) ColumnarToRow [codegen id : 3] Input [5]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, 
cs_ext_sales_price#4, cs_sold_date_sk#5] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -198,7 +198,7 @@ Arguments: hashpartitioning(cs_order_number#2, cs_item_sk#1, 5), ENSURE_REQUIREM Input [9]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, cs_ext_sales_price#4, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, d_year#14] Arguments: [cs_order_number#2 ASC NULLS FIRST, cs_item_sk#1 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.catalog_returns +(16) Scan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#15, cr_order_number#16, cr_return_quantity#17, cr_return_amount#18, cr_returned_date_sk#19] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -234,7 +234,7 @@ Join condition: None Output [7]: [d_year#14, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, (cs_quantity#3 - coalesce(cr_return_quantity#17, 0)) AS sales_cnt#20, (cs_ext_sales_price#4 - coalesce(cr_return_amount#18, 0.00)) AS sales_amt#21] Input [13]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, cs_ext_sales_price#4, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, d_year#14, cr_item_sk#15, cr_order_number#16, cr_return_quantity#17, cr_return_amount#18] -(unknown) Scan parquet spark_catalog.default.store_sales +(24) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#22, ss_ticket_number#23, ss_quantity#24, ss_ext_sales_price#25, ss_sold_date_sk#26] Batched: true Location: InMemoryFileIndex [] @@ -283,7 +283,7 @@ Arguments: hashpartitioning(ss_ticket_number#23, ss_item_sk#22, 5), ENSURE_REQUI Input [9]: [ss_item_sk#22, ss_ticket_number#23, ss_quantity#24, ss_ext_sales_price#25, i_brand_id#29, i_class_id#30, i_category_id#31, i_manufact_id#32, d_year#34] Arguments: [ss_ticket_number#23 ASC NULLS FIRST, ss_item_sk#22 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.store_returns +(35) Scan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#35, sr_ticket_number#36, sr_return_quantity#37, sr_return_amt#38, sr_returned_date_sk#39] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -319,7 +319,7 @@ Join condition: None Output [7]: [d_year#34, i_brand_id#29, i_class_id#30, i_category_id#31, i_manufact_id#32, (ss_quantity#24 - coalesce(sr_return_quantity#37, 0)) AS sales_cnt#40, (ss_ext_sales_price#25 - coalesce(sr_return_amt#38, 0.00)) AS sales_amt#41] Input [13]: [ss_item_sk#22, ss_ticket_number#23, ss_quantity#24, ss_ext_sales_price#25, i_brand_id#29, i_class_id#30, i_category_id#31, i_manufact_id#32, d_year#34, sr_item_sk#35, sr_ticket_number#36, sr_return_quantity#37, sr_return_amt#38] -(unknown) Scan parquet spark_catalog.default.web_sales +(43) Scan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#42, ws_order_number#43, ws_quantity#44, ws_ext_sales_price#45, ws_sold_date_sk#46] Batched: true Location: InMemoryFileIndex [] @@ -368,7 +368,7 @@ Arguments: hashpartitioning(ws_order_number#43, ws_item_sk#42, 5), ENSURE_REQUIR Input [9]: [ws_item_sk#42, ws_order_number#43, ws_quantity#44, ws_ext_sales_price#45, i_brand_id#49, i_class_id#50, i_category_id#51, i_manufact_id#52, d_year#54] Arguments: [ws_order_number#43 ASC NULLS FIRST, ws_item_sk#42 ASC NULLS FIRST], 
false, 0 -(unknown) Scan parquet spark_catalog.default.web_returns +(54) Scan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#55, wr_order_number#56, wr_return_quantity#57, wr_return_amt#58, wr_returned_date_sk#59] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -454,7 +454,7 @@ Arguments: hashpartitioning(i_brand_id#8, i_class_id#9, i_category_id#10, i_manu Input [7]: [d_year#14, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, sales_cnt#68, sales_amt#69] Arguments: [i_brand_id#8 ASC NULLS FIRST, i_class_id#9 ASC NULLS FIRST, i_category_id#10 ASC NULLS FIRST, i_manufact_id#12 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.catalog_sales +(72) Scan parquet spark_catalog.default.catalog_sales Output [5]: [cs_item_sk#70, cs_order_number#71, cs_quantity#72, cs_ext_sales_price#73, cs_sold_date_sk#74] Batched: true Location: InMemoryFileIndex [] @@ -520,7 +520,7 @@ Join condition: None Output [7]: [d_year#82, i_brand_id#77, i_class_id#78, i_category_id#79, i_manufact_id#80, (cs_quantity#72 - coalesce(cr_return_quantity#85, 0)) AS sales_cnt#20, (cs_ext_sales_price#73 - coalesce(cr_return_amount#86, 0.00)) AS sales_amt#21] Input [13]: [cs_item_sk#70, cs_order_number#71, cs_quantity#72, cs_ext_sales_price#73, i_brand_id#77, i_class_id#78, i_category_id#79, i_manufact_id#80, d_year#82, cr_item_sk#83, cr_order_number#84, cr_return_quantity#85, cr_return_amount#86] -(unknown) Scan parquet spark_catalog.default.store_sales +(87) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#87, ss_ticket_number#88, ss_quantity#89, ss_ext_sales_price#90, ss_sold_date_sk#91] Batched: true Location: InMemoryFileIndex [] @@ -586,7 +586,7 @@ Join condition: None Output [7]: [d_year#99, i_brand_id#94, i_class_id#95, i_category_id#96, i_manufact_id#97, (ss_quantity#89 - coalesce(sr_return_quantity#102, 0)) AS sales_cnt#40, (ss_ext_sales_price#90 - coalesce(sr_return_amt#103, 0.00)) AS sales_amt#41] Input [13]: [ss_item_sk#87, ss_ticket_number#88, ss_quantity#89, ss_ext_sales_price#90, i_brand_id#94, i_class_id#95, i_category_id#96, i_manufact_id#97, d_year#99, sr_item_sk#100, sr_ticket_number#101, sr_return_quantity#102, sr_return_amt#103] -(unknown) Scan parquet spark_catalog.default.web_sales +(102) Scan parquet spark_catalog.default.web_sales Output [5]: [ws_item_sk#104, ws_order_number#105, ws_quantity#106, ws_ext_sales_price#107, ws_sold_date_sk#108] Batched: true Location: InMemoryFileIndex [] @@ -725,7 +725,7 @@ BroadcastExchange (133) +- CometScan parquet spark_catalog.default.date_dim (130) -(unknown) Scan parquet spark_catalog.default.date_dim +(130) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#13, d_year#14] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -754,7 +754,7 @@ BroadcastExchange (137) +- CometScan parquet spark_catalog.default.date_dim (134) -(unknown) Scan parquet spark_catalog.default.date_dim +(134) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#81, d_year#82] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q76/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q76/explain.txt index 459bc5c01..f3b80d8bd 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q76/explain.txt +++ 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q76/explain.txt @@ -39,7 +39,7 @@ TakeOrderedAndProject (38) +- ReusedExchange (31) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -54,7 +54,7 @@ Condition : (isnull(ss_store_sk#2) AND isnotnull(ss_item_sk#1)) (3) ColumnarToRow [codegen id : 3] Input [4]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_category#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -82,7 +82,7 @@ Join condition: None Output [4]: [ss_store_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4, i_category#6] Input [6]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4, i_item_sk#5, i_category#6] -(unknown) Scan parquet spark_catalog.default.date_dim +(10) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#8, d_qoy#9] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -110,7 +110,7 @@ Join condition: None Output [6]: [store AS channel#10, ss_store_sk#2 AS col_name#11, d_year#8, d_qoy#9, i_category#6, ss_ext_sales_price#3 AS ext_sales_price#12] Input [7]: [ss_store_sk#2, ss_ext_sales_price#3, ss_sold_date_sk#4, i_category#6, d_date_sk#7, d_year#8, d_qoy#9] -(unknown) Scan parquet spark_catalog.default.web_sales +(16) Scan parquet spark_catalog.default.web_sales Output [4]: [ws_item_sk#13, ws_ship_customer_sk#14, ws_ext_sales_price#15, ws_sold_date_sk#16] Batched: true Location: InMemoryFileIndex [] @@ -151,7 +151,7 @@ Join condition: None Output [6]: [web AS channel#22, ws_ship_customer_sk#14 AS col_name#23, d_year#20, d_qoy#21, i_category#18, ws_ext_sales_price#15 AS ext_sales_price#24] Input [7]: [ws_ship_customer_sk#14, ws_ext_sales_price#15, ws_sold_date_sk#16, i_category#18, d_date_sk#19, d_year#20, d_qoy#21] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(25) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_ship_addr_sk#25, cs_item_sk#26, cs_ext_sales_price#27, cs_sold_date_sk#28] Batched: true Location: InMemoryFileIndex [] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt index 692db69b8..bbfa6a4c4 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q77/explain.txt @@ -86,7 +86,7 @@ TakeOrderedAndProject (85) +- ReusedExchange (71) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -114,7 +114,7 @@ Join condition: None Output [3]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3] Input [5]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4, d_date_sk#6] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [1]: [s_store_sk#7] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -160,7 +160,7 @@ Functions [2]: 
[sum(UnscaledValue(ss_ext_sales_price#2)), sum(UnscaledValue(ss_n Aggregate Attributes [2]: [sum(UnscaledValue(ss_ext_sales_price#2))#12, sum(UnscaledValue(ss_net_profit#3))#13] Results [3]: [s_store_sk#7, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#2))#12,17,2) AS sales#14, MakeDecimal(sum(UnscaledValue(ss_net_profit#3))#13,17,2) AS profit#15] -(unknown) Scan parquet spark_catalog.default.store_returns +(16) Scan parquet spark_catalog.default.store_returns Output [4]: [sr_store_sk#16, sr_return_amt#17, sr_net_loss#18, sr_returned_date_sk#19] Batched: true Location: InMemoryFileIndex [] @@ -233,7 +233,7 @@ Join condition: None Output [5]: [sales#14, coalesce(returns#29, 0.00) AS returns#31, (profit#15 - coalesce(profit_loss#30, 0.00)) AS profit#32, store channel AS channel#33, s_store_sk#7 AS id#34] Input [6]: [s_store_sk#7, sales#14, profit#15, s_store_sk#22, returns#29, profit_loss#30] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(31) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_call_center_sk#35, cs_ext_sales_price#36, cs_net_profit#37, cs_sold_date_sk#38] Batched: true Location: InMemoryFileIndex [] @@ -278,7 +278,7 @@ Results [3]: [cs_call_center_sk#35, MakeDecimal(sum(UnscaledValue(cs_ext_sales_p Input [3]: [cs_call_center_sk#35, sales#47, profit#48] Arguments: IdentityBroadcastMode, [plan_id=6] -(unknown) Scan parquet spark_catalog.default.catalog_returns +(40) Scan parquet spark_catalog.default.catalog_returns Output [3]: [cr_return_amount#49, cr_net_loss#50, cr_returned_date_sk#51] Batched: true Location: InMemoryFileIndex [] @@ -327,7 +327,7 @@ Join condition: None Output [5]: [sales#47, returns#60, (profit#48 - profit_loss#61) AS profit#62, catalog channel AS channel#63, cs_call_center_sk#35 AS id#64] Input [5]: [cs_call_center_sk#35, sales#47, profit#48, returns#60, profit_loss#61] -(unknown) Scan parquet spark_catalog.default.web_sales +(50) Scan parquet spark_catalog.default.web_sales Output [4]: [ws_web_page_sk#65, ws_ext_sales_price#66, ws_net_profit#67, ws_sold_date_sk#68] Batched: true Location: InMemoryFileIndex [] @@ -355,7 +355,7 @@ Join condition: None Output [3]: [ws_web_page_sk#65, ws_ext_sales_price#66, ws_net_profit#67] Input [5]: [ws_web_page_sk#65, ws_ext_sales_price#66, ws_net_profit#67, ws_sold_date_sk#68, d_date_sk#70] -(unknown) Scan parquet spark_catalog.default.web_page +(56) Scan parquet spark_catalog.default.web_page Output [1]: [wp_web_page_sk#71] Batched: true Location [not included in comparison]/{warehouse_dir}/web_page] @@ -401,7 +401,7 @@ Functions [2]: [sum(UnscaledValue(ws_ext_sales_price#66)), sum(UnscaledValue(ws_ Aggregate Attributes [2]: [sum(UnscaledValue(ws_ext_sales_price#66))#76, sum(UnscaledValue(ws_net_profit#67))#77] Results [3]: [wp_web_page_sk#71, MakeDecimal(sum(UnscaledValue(ws_ext_sales_price#66))#76,17,2) AS sales#78, MakeDecimal(sum(UnscaledValue(ws_net_profit#67))#77,17,2) AS profit#79] -(unknown) Scan parquet spark_catalog.default.web_returns +(65) Scan parquet spark_catalog.default.web_returns Output [4]: [wr_web_page_sk#80, wr_return_amt#81, wr_net_loss#82, wr_returned_date_sk#83] Batched: true Location: InMemoryFileIndex [] @@ -512,7 +512,7 @@ BroadcastExchange (90) +- CometScan parquet spark_catalog.default.date_dim (86) -(unknown) Scan parquet spark_catalog.default.date_dim +(86) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#6, d_date#120] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q78/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q78/explain.txt index d9e4b80d8..7f2688112 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q78/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q78/explain.txt @@ -71,7 +71,7 @@ TakeOrderedAndProject (70) +- ReusedExchange (60) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [7]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_wholesale_cost#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -94,7 +94,7 @@ Arguments: hashpartitioning(ss_ticket_number#3, ss_item_sk#1, 5), ENSURE_REQUIRE Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_wholesale_cost#5, ss_sales_price#6, ss_sold_date_sk#7] Arguments: [ss_ticket_number#3 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.store_returns +(6) Scan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#9, sr_ticket_number#10, sr_returned_date_sk#11] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -169,7 +169,7 @@ Results [6]: [d_year#13 AS ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, sum( Input [6]: [ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, ss_qty#24, ss_wc#25, ss_sp#26] Arguments: [ss_sold_year#23 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST, ss_customer_sk#2 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.web_sales +(22) Scan parquet spark_catalog.default.web_sales Output [7]: [ws_item_sk#27, ws_bill_customer_sk#28, ws_order_number#29, ws_quantity#30, ws_wholesale_cost#31, ws_sales_price#32, ws_sold_date_sk#33] Batched: true Location: InMemoryFileIndex [] @@ -192,7 +192,7 @@ Arguments: hashpartitioning(ws_order_number#29, ws_item_sk#27, 5), ENSURE_REQUIR Input [7]: [ws_item_sk#27, ws_bill_customer_sk#28, ws_order_number#29, ws_quantity#30, ws_wholesale_cost#31, ws_sales_price#32, ws_sold_date_sk#33] Arguments: [ws_order_number#29 ASC NULLS FIRST, ws_item_sk#27 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.web_returns +(27) Scan parquet spark_catalog.default.web_returns Output [3]: [wr_item_sk#35, wr_order_number#36, wr_returned_date_sk#37] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -281,7 +281,7 @@ Join condition: None Output [9]: [ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, ss_qty#24, ss_wc#25, ss_sp#26, ws_qty#51, ws_wc#52, ws_sp#53] Input [12]: [ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, ss_qty#24, ss_wc#25, ss_sp#26, ws_sold_year#49, ws_item_sk#27, ws_customer_sk#50, ws_qty#51, ws_wc#52, ws_sp#53] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(46) Scan parquet spark_catalog.default.catalog_sales Output [7]: [cs_bill_customer_sk#54, cs_item_sk#55, cs_order_number#56, cs_quantity#57, cs_wholesale_cost#58, cs_sales_price#59, cs_sold_date_sk#60] Batched: true Location: InMemoryFileIndex [] @@ -304,7 +304,7 @@ Arguments: hashpartitioning(cs_order_number#56, cs_item_sk#55, 5), ENSURE_REQUIR Input [7]: [cs_bill_customer_sk#54, cs_item_sk#55, cs_order_number#56, cs_quantity#57, cs_wholesale_cost#58, cs_sales_price#59, cs_sold_date_sk#60] Arguments: [cs_order_number#56 ASC NULLS FIRST, cs_item_sk#55 ASC NULLS FIRST], false, 0 -(unknown) Scan 
parquet spark_catalog.default.catalog_returns +(51) Scan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#62, cr_order_number#63, cr_returned_date_sk#64] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -406,7 +406,7 @@ BroadcastExchange (74) +- CometScan parquet spark_catalog.default.date_dim (71) -(unknown) Scan parquet spark_catalog.default.date_dim +(71) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#12, d_year#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q79/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q79/explain.txt index 667c05e1c..c89bad220 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q79/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q79/explain.txt @@ -31,7 +31,7 @@ TakeOrderedAndProject (30) +- CometScan parquet spark_catalog.default.customer (24) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [8]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -59,7 +59,7 @@ Join condition: None Output [7]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7] Input [9]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, ss_sold_date_sk#8, d_date_sk#10] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [3]: [s_store_sk#11, s_number_employees#12, s_city#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -91,7 +91,7 @@ Join condition: None Output [7]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, s_city#13] Input [9]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_addr_sk#3, ss_store_sk#4, ss_ticket_number#5, ss_coupon_amt#6, ss_net_profit#7, s_store_sk#11, s_city#13] -(unknown) Scan parquet spark_catalog.default.household_demographics +(14) Scan parquet spark_catalog.default.household_demographics Output [3]: [hd_demo_sk#14, hd_dep_count#15, hd_vehicle_count#16] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -141,7 +141,7 @@ Functions [2]: [sum(UnscaledValue(ss_coupon_amt#6)), sum(UnscaledValue(ss_net_pr Aggregate Attributes [2]: [sum(UnscaledValue(ss_coupon_amt#6))#21, sum(UnscaledValue(ss_net_profit#7))#22] Results [5]: [ss_ticket_number#5, ss_customer_sk#1, s_city#13, MakeDecimal(sum(UnscaledValue(ss_coupon_amt#6))#21,17,2) AS amt#23, MakeDecimal(sum(UnscaledValue(ss_net_profit#7))#22,17,2) AS profit#24] -(unknown) Scan parquet spark_catalog.default.customer +(24) Scan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#25, c_first_name#26, c_last_name#27] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -183,7 +183,7 @@ BroadcastExchange (35) +- CometScan parquet spark_catalog.default.date_dim (31) -(unknown) Scan parquet spark_catalog.default.date_dim +(31) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#10, d_year#29, d_dow#30] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git 
a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q8/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q8/explain.txt index f54999ff5..7d6d717c1 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q8/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q8/explain.txt @@ -44,7 +44,7 @@ TakeOrderedAndProject (43) +- CometScan parquet spark_catalog.default.customer (20) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -72,7 +72,7 @@ Join condition: None Output [2]: [ss_store_sk#1, ss_net_profit#2] Input [4]: [ss_store_sk#1, ss_net_profit#2, ss_sold_date_sk#3, d_date_sk#5] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [3]: [s_store_sk#6, s_store_name#7, s_zip#8] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -100,7 +100,7 @@ Join condition: None Output [3]: [ss_net_profit#2, s_store_name#7, s_zip#8] Input [5]: [ss_store_sk#1, ss_net_profit#2, s_store_sk#6, s_store_name#7, s_zip#8] -(unknown) Scan parquet spark_catalog.default.customer_address +(13) Scan parquet spark_catalog.default.customer_address Output [1]: [ca_zip#9] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -117,7 +117,7 @@ Arguments: [ca_zip#10], [substr(ca_zip#9, 1, 5) AS ca_zip#10] (16) ColumnarToRow [codegen id : 6] Input [1]: [ca_zip#10] -(unknown) Scan parquet spark_catalog.default.customer_address +(17) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#11, ca_zip#12] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -131,7 +131,7 @@ Condition : isnotnull(ca_address_sk#11) (19) ColumnarToRow [codegen id : 4] Input [2]: [ca_address_sk#11, ca_zip#12] -(unknown) Scan parquet spark_catalog.default.customer +(20) Scan parquet spark_catalog.default.customer Output [2]: [c_current_addr_sk#13, c_preferred_cust_flag#14] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -263,7 +263,7 @@ BroadcastExchange (48) +- CometScan parquet spark_catalog.default.date_dim (44) -(unknown) Scan parquet spark_catalog.default.date_dim +(44) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#5, d_year#24, d_qoy#25] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt index 338f86087..db2d015db 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q80/explain.txt @@ -108,7 +108,7 @@ TakeOrderedAndProject (107) +- ReusedExchange (96) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -131,7 +131,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#4, 5), ENSURE_REQUIRE Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, 
ss_net_profit#6, ss_sold_date_sk#7] Arguments: [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#4 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.store_returns +(6) Scan parquet spark_catalog.default.store_returns Output [5]: [sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12, sr_returned_date_sk#13] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -180,7 +180,7 @@ Join condition: None Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12] Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12, d_date_sk#14] -(unknown) Scan parquet spark_catalog.default.store +(17) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#15, s_store_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -208,7 +208,7 @@ Join condition: None Output [7]: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#16] Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_sk#15, s_store_id#16] -(unknown) Scan parquet spark_catalog.default.item +(23) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#17, i_current_price#18] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -240,7 +240,7 @@ Join condition: None Output [6]: [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#16] Input [8]: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#16, i_item_sk#17] -(unknown) Scan parquet spark_catalog.default.promotion +(30) Scan parquet spark_catalog.default.promotion Output [2]: [p_promo_sk#19, p_channel_tv#20] Batched: true Location [not included in comparison]/{warehouse_dir}/promotion] @@ -290,7 +290,7 @@ Functions [3]: [sum(UnscaledValue(ss_ext_sales_price#5)), sum(coalesce(cast(sr_r Aggregate Attributes [3]: [sum(UnscaledValue(ss_ext_sales_price#5))#31, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#32, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#33] Results [5]: [MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#5))#31,17,2) AS sales#34, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#32 AS returns#35, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#33 AS profit#36, store channel AS channel#37, concat(store, s_store_id#16) AS id#38] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(40) Scan parquet spark_catalog.default.catalog_sales Output [7]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_order_number#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45] Batched: true Location: InMemoryFileIndex [] @@ -313,7 +313,7 @@ Arguments: hashpartitioning(cs_item_sk#40, cs_order_number#42, 5), ENSURE_REQUIR Input [7]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_order_number#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45] Arguments: [cs_item_sk#40 ASC NULLS FIRST, cs_order_number#42 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.catalog_returns +(45) Scan parquet spark_catalog.default.catalog_returns Output [5]: [cr_item_sk#47, cr_order_number#48, cr_return_amount#49, 
cr_net_loss#50, cr_returned_date_sk#51] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_returns] @@ -362,7 +362,7 @@ Join condition: None Output [7]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_ext_sales_price#43, cs_net_profit#44, cr_return_amount#49, cr_net_loss#50] Input [9]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45, cr_return_amount#49, cr_net_loss#50, d_date_sk#52] -(unknown) Scan parquet spark_catalog.default.catalog_page +(56) Scan parquet spark_catalog.default.catalog_page Output [2]: [cp_catalog_page_sk#53, cp_catalog_page_id#54] Batched: true Location [not included in comparison]/{warehouse_dir}/catalog_page] @@ -434,7 +434,7 @@ Functions [3]: [sum(UnscaledValue(cs_ext_sales_price#43)), sum(coalesce(cast(cr_ Aggregate Attributes [3]: [sum(UnscaledValue(cs_ext_sales_price#43))#67, sum(coalesce(cast(cr_return_amount#49 as decimal(12,2)), 0.00))#68, sum((cs_net_profit#44 - coalesce(cast(cr_net_loss#50 as decimal(12,2)), 0.00)))#69] Results [5]: [MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#43))#67,17,2) AS sales#70, sum(coalesce(cast(cr_return_amount#49 as decimal(12,2)), 0.00))#68 AS returns#71, sum((cs_net_profit#44 - coalesce(cast(cr_net_loss#50 as decimal(12,2)), 0.00)))#69 AS profit#72, catalog channel AS channel#73, concat(catalog_page, cp_catalog_page_id#54) AS id#74] -(unknown) Scan parquet spark_catalog.default.web_sales +(71) Scan parquet spark_catalog.default.web_sales Output [7]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_order_number#78, ws_ext_sales_price#79, ws_net_profit#80, ws_sold_date_sk#81] Batched: true Location: InMemoryFileIndex [] @@ -457,7 +457,7 @@ Arguments: hashpartitioning(ws_item_sk#75, ws_order_number#78, 5), ENSURE_REQUIR Input [7]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_order_number#78, ws_ext_sales_price#79, ws_net_profit#80, ws_sold_date_sk#81] Arguments: [ws_item_sk#75 ASC NULLS FIRST, ws_order_number#78 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.web_returns +(76) Scan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#83, wr_order_number#84, wr_return_amt#85, wr_net_loss#86, wr_returned_date_sk#87] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -506,7 +506,7 @@ Join condition: None Output [7]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_ext_sales_price#79, ws_net_profit#80, wr_return_amt#85, wr_net_loss#86] Input [9]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_ext_sales_price#79, ws_net_profit#80, ws_sold_date_sk#81, wr_return_amt#85, wr_net_loss#86, d_date_sk#88] -(unknown) Scan parquet spark_catalog.default.web_site +(87) Scan parquet spark_catalog.default.web_site Output [2]: [web_site_sk#89, web_site_id#90] Batched: true Location [not included in comparison]/{warehouse_dir}/web_site] @@ -616,7 +616,7 @@ BroadcastExchange (112) +- CometScan parquet spark_catalog.default.date_dim (108) -(unknown) Scan parquet spark_catalog.default.date_dim +(108) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_date#132] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt index d8dc396dd..4d07337e6 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt 
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q81/explain.txt @@ -49,7 +49,7 @@ TakeOrderedAndProject (48) +- CometScan parquet spark_catalog.default.customer_address (42) -(unknown) Scan parquet spark_catalog.default.catalog_returns +(1) Scan parquet spark_catalog.default.catalog_returns Output [4]: [cr_returning_customer_sk#1, cr_returning_addr_sk#2, cr_return_amt_inc_tax#3, cr_returned_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -77,7 +77,7 @@ Join condition: None Output [3]: [cr_returning_customer_sk#1, cr_returning_addr_sk#2, cr_return_amt_inc_tax#3] Input [5]: [cr_returning_customer_sk#1, cr_returning_addr_sk#2, cr_return_amt_inc_tax#3, cr_returned_date_sk#4, d_date_sk#6] -(unknown) Scan parquet spark_catalog.default.customer_address +(7) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#7, ca_state#8] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -127,7 +127,7 @@ Results [3]: [cr_returning_customer_sk#1 AS ctr_customer_sk#12, ca_state#8 AS ct Input [3]: [ctr_customer_sk#12, ctr_state#13, ctr_total_return#14] Condition : isnotnull(ctr_total_return#14) -(unknown) Scan parquet spark_catalog.default.catalog_returns +(17) Scan parquet spark_catalog.default.catalog_returns Output [4]: [cr_returning_customer_sk#1, cr_returning_addr_sk#2, cr_return_amt_inc_tax#3, cr_returned_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -222,7 +222,7 @@ Join condition: (cast(ctr_total_return#14 as decimal(24,7)) > (avg(ctr_total_ret Output [2]: [ctr_customer_sk#12, ctr_total_return#14] Input [5]: [ctr_customer_sk#12, ctr_state#13, ctr_total_return#14, (avg(ctr_total_return) * 1.2)#23, ctr_state#13#24] -(unknown) Scan parquet spark_catalog.default.customer +(36) Scan parquet spark_catalog.default.customer Output [6]: [c_customer_sk#25, c_customer_id#26, c_current_addr_sk#27, c_salutation#28, c_first_name#29, c_last_name#30] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -250,7 +250,7 @@ Join condition: None Output [6]: [ctr_total_return#14, c_customer_id#26, c_current_addr_sk#27, c_salutation#28, c_first_name#29, c_last_name#30] Input [8]: [ctr_customer_sk#12, ctr_total_return#14, c_customer_sk#25, c_customer_id#26, c_current_addr_sk#27, c_salutation#28, c_first_name#29, c_last_name#30] -(unknown) Scan parquet spark_catalog.default.customer_address +(42) Scan parquet spark_catalog.default.customer_address Output [12]: [ca_address_sk#31, ca_street_number#32, ca_street_name#33, ca_street_type#34, ca_suite_number#35, ca_city#36, ca_county#37, ca_state#38, ca_zip#39, ca_country#40, ca_gmt_offset#41, ca_location_type#42] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -292,7 +292,7 @@ BroadcastExchange (53) +- CometScan parquet spark_catalog.default.date_dim (49) -(unknown) Scan parquet spark_catalog.default.date_dim +(49) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#6, d_year#43] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt index 683f7bd94..7609fa520 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q82/explain.txt @@ -26,7 +26,7 @@ TakeOrderedAndProject (25) +- 
CometScan parquet spark_catalog.default.store_sales (16) -(unknown) Scan parquet spark_catalog.default.item +(1) Scan parquet spark_catalog.default.item Output [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -44,7 +44,7 @@ Arguments: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4], [i_item (4) ColumnarToRow [codegen id : 3] Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] -(unknown) Scan parquet spark_catalog.default.inventory +(5) Scan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#6, inv_quantity_on_hand#7, inv_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -94,7 +94,7 @@ Input [6]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, inv_date Input [4]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4] Arguments: HashedRelationBroadcastMode(List(cast(input[0, int, true] as bigint)),false), [plan_id=2] -(unknown) Scan parquet spark_catalog.default.store_sales +(16) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#11, ss_sold_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -154,7 +154,7 @@ BroadcastExchange (30) +- CometScan parquet spark_catalog.default.date_dim (26) -(unknown) Scan parquet spark_catalog.default.date_dim +(26) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#10, d_date#13] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt index 164ecf8af..0aa5212b0 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q83/explain.txt @@ -47,7 +47,7 @@ TakeOrderedAndProject (46) +- ReusedExchange (37) -(unknown) Scan parquet spark_catalog.default.store_returns +(1) Scan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -62,7 +62,7 @@ Condition : isnotnull(sr_item_sk#1) (3) ColumnarToRow [codegen id : 5] Input [3]: [sr_item_sk#1, sr_return_quantity#2, sr_returned_date_sk#3] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#5, i_item_id#6] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -121,7 +121,7 @@ Functions [1]: [sum(sr_return_quantity#2)] Aggregate Attributes [1]: [sum(sr_return_quantity#2)#10] Results [2]: [i_item_id#6 AS item_id#11, sum(sr_return_quantity#2)#10 AS sr_item_qty#12] -(unknown) Scan parquet spark_catalog.default.catalog_returns +(16) Scan parquet spark_catalog.default.catalog_returns Output [3]: [cr_item_sk#13, cr_return_quantity#14, cr_returned_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -194,7 +194,7 @@ Join condition: None Output [3]: [item_id#11, sr_item_qty#12, cr_item_qty#24] Input [4]: [item_id#11, sr_item_qty#12, item_id#23, cr_item_qty#24] -(unknown) Scan parquet spark_catalog.default.web_returns +(31) Scan parquet spark_catalog.default.web_returns Output [3]: [wr_item_sk#25, wr_return_quantity#26, wr_returned_date_sk#27] Batched: true Location: InMemoryFileIndex [] @@ -292,7 +292,7 @@ BroadcastExchange (62) +- CometScan parquet 
+- CometScan parquet spark_catalog.default.date_dim (52)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(47) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#7, d_date#41]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -306,7 +306,7 @@ Condition : isnotnull(d_date_sk#7)
(49) ColumnarToRow [codegen id : 3]
Input [2]: [d_date_sk#7, d_date#41]

-(unknown) Scan parquet spark_catalog.default.date_dim
+(50) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date#42, d_week_seq#43]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -315,7 +315,7 @@ ReadSchema: struct
(51) ColumnarToRow [codegen id : 2]
Input [2]: [d_date#42, d_week_seq#43]

-(unknown) Scan parquet spark_catalog.default.date_dim
+(52) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date#44, d_week_seq#45]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt
index ad509dc72..8dc935d1d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q84/explain.txt
@@ -38,7 +38,7 @@ TakeOrderedAndProject (37)
+- CometScan parquet spark_catalog.default.store_returns (31)

-(unknown) Scan parquet spark_catalog.default.customer
+(1) Scan parquet spark_catalog.default.customer
Output [6]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#5, c_last_name#6]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -52,7 +52,7 @@ Condition : ((isnotnull(c_current_addr_sk#4) AND isnotnull(c_current_cdemo_sk#2)
(3) ColumnarToRow [codegen id : 5]
Input [6]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#5, c_last_name#6]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(4) Scan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#7, ca_city#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -84,7 +84,7 @@ Join condition: None
Output [5]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_first_name#5, c_last_name#6]
Input [7]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#5, c_last_name#6, ca_address_sk#7]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(11) Scan parquet spark_catalog.default.customer_demographics
Output [1]: [cd_demo_sk#9]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -112,7 +112,7 @@ Join condition: None
Output [5]: [c_customer_id#1, c_current_hdemo_sk#3, c_first_name#5, c_last_name#6, cd_demo_sk#9]
Input [6]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_first_name#5, c_last_name#6, cd_demo_sk#9]

-(unknown) Scan parquet spark_catalog.default.household_demographics
+(17) Scan parquet spark_catalog.default.household_demographics
Output [2]: [hd_demo_sk#10, hd_income_band_sk#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
@@ -140,7 +140,7 @@ Join condition: None
Output [5]: [c_customer_id#1, c_first_name#5, c_last_name#6, cd_demo_sk#9, hd_income_band_sk#11]
Input [7]: [c_customer_id#1, c_current_hdemo_sk#3, c_first_name#5, c_last_name#6, cd_demo_sk#9, hd_demo_sk#10, hd_income_band_sk#11]

-(unknown) Scan parquet spark_catalog.default.income_band
+(23) Scan parquet spark_catalog.default.income_band
Output [3]: [ib_income_band_sk#12, ib_lower_bound#13, ib_upper_bound#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/income_band]
@@ -176,7 +176,7 @@ Input [6]: [c_customer_id#1, c_first_name#5, c_last_name#6, cd_demo_sk#9, hd_inc
Input [4]: [c_customer_id#1, c_first_name#5, c_last_name#6, cd_demo_sk#9]
Arguments: HashedRelationBroadcastMode(List(cast(input[3, int, true] as bigint)),false), [plan_id=5]

-(unknown) Scan parquet spark_catalog.default.store_returns
+(31) Scan parquet spark_catalog.default.store_returns
Output [2]: [sr_cdemo_sk#15, sr_returned_date_sk#16]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_returns]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q85/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q85/explain.txt
index 53329906e..ce1cc2262 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q85/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q85/explain.txt
@@ -48,7 +48,7 @@ TakeOrderedAndProject (47)
+- CometScan parquet spark_catalog.default.reason (38)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [7]: [ws_item_sk#1, ws_web_page_sk#2, ws_order_number#3, ws_quantity#4, ws_sales_price#5, ws_net_profit#6, ws_sold_date_sk#7]
Batched: true
Location: InMemoryFileIndex []
@@ -64,7 +64,7 @@ Condition : ((((isnotnull(ws_item_sk#1) AND isnotnull(ws_order_number#3)) AND is
Input [7]: [ws_item_sk#1, ws_web_page_sk#2, ws_order_number#3, ws_quantity#4, ws_sales_price#5, ws_net_profit#6, ws_sold_date_sk#7]
Arguments: [ws_item_sk#1, ws_web_page_sk#2, ws_order_number#3, ws_quantity#4, ws_sales_price#5, ws_net_profit#6, ws_sold_date_sk#7]

-(unknown) Scan parquet spark_catalog.default.web_returns
+(4) Scan parquet spark_catalog.default.web_returns
Output [9]: [wr_item_sk#9, wr_refunded_cdemo_sk#10, wr_refunded_addr_sk#11, wr_returning_cdemo_sk#12, wr_reason_sk#13, wr_order_number#14, wr_fee#15, wr_refunded_cash#16, wr_returned_date_sk#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_returns]
@@ -91,7 +91,7 @@ Arguments: [ws_web_page_sk#2, ws_quantity#4, ws_sales_price#5, ws_net_profit#6,
(9) ColumnarToRow [codegen id : 7]
Input [11]: [ws_web_page_sk#2, ws_quantity#4, ws_sales_price#5, ws_net_profit#6, ws_sold_date_sk#7, wr_refunded_cdemo_sk#10, wr_refunded_addr_sk#11, wr_returning_cdemo_sk#12, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16]

-(unknown) Scan parquet spark_catalog.default.web_page
+(10) Scan parquet spark_catalog.default.web_page
Output [1]: [wp_web_page_sk#18]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_page]
@@ -119,7 +119,7 @@ Join condition: None
Output [10]: [ws_quantity#4, ws_sales_price#5, ws_net_profit#6, ws_sold_date_sk#7, wr_refunded_cdemo_sk#10, wr_refunded_addr_sk#11, wr_returning_cdemo_sk#12, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16]
Input [12]: [ws_web_page_sk#2, ws_quantity#4, ws_sales_price#5, ws_net_profit#6, ws_sold_date_sk#7, wr_refunded_cdemo_sk#10, wr_refunded_addr_sk#11, wr_returning_cdemo_sk#12, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16, wp_web_page_sk#18]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(16) Scan parquet spark_catalog.default.customer_demographics
Output [3]: [cd_demo_sk#19, cd_marital_status#20, cd_education_status#21]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -147,7 +147,7 @@ Join condition: ((((((cd_marital_status#20 = M) AND (cd_education_status#21 = Ad
Output [10]: [ws_quantity#4, ws_net_profit#6, ws_sold_date_sk#7, wr_refunded_addr_sk#11, wr_returning_cdemo_sk#12, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16, cd_marital_status#20, cd_education_status#21]
Input [13]: [ws_quantity#4, ws_sales_price#5, ws_net_profit#6, ws_sold_date_sk#7, wr_refunded_cdemo_sk#10, wr_refunded_addr_sk#11, wr_returning_cdemo_sk#12, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16, cd_demo_sk#19, cd_marital_status#20, cd_education_status#21]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(22) Scan parquet spark_catalog.default.customer_demographics
Output [3]: [cd_demo_sk#22, cd_marital_status#23, cd_education_status#24]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -175,7 +175,7 @@ Join condition: None
Output [7]: [ws_quantity#4, ws_net_profit#6, ws_sold_date_sk#7, wr_refunded_addr_sk#11, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16]
Input [13]: [ws_quantity#4, ws_net_profit#6, ws_sold_date_sk#7, wr_refunded_addr_sk#11, wr_returning_cdemo_sk#12, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16, cd_marital_status#20, cd_education_status#21, cd_demo_sk#22, cd_marital_status#23, cd_education_status#24]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(28) Scan parquet spark_catalog.default.customer_address
Output [3]: [ca_address_sk#25, ca_state#26, ca_country#27]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -220,7 +220,7 @@ Join condition: None
Output [4]: [ws_quantity#4, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16]
Input [6]: [ws_quantity#4, ws_sold_date_sk#7, wr_reason_sk#13, wr_fee#15, wr_refunded_cash#16, d_date_sk#28]

-(unknown) Scan parquet spark_catalog.default.reason
+(38) Scan parquet spark_catalog.default.reason
Output [2]: [r_reason_sk#29, r_reason_desc#30]
Batched: true
Location [not included in comparison]/{warehouse_dir}/reason]
@@ -280,7 +280,7 @@ BroadcastExchange (52)
+- CometScan parquet spark_catalog.default.date_dim (48)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(48) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#28, d_year#50]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q86/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q86/explain.txt
index cab784da5..5bfd2925d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q86/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q86/explain.txt
@@ -22,7 +22,7 @@ TakeOrderedAndProject (21)
+- CometScan parquet spark_catalog.default.item (7)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3]
Batched: true
Location: InMemoryFileIndex []
@@ -50,7 +50,7 @@ Join condition: None
Output [2]: [ws_item_sk#1, ws_net_paid#2]
Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5]

-(unknown) Scan parquet spark_catalog.default.item
+(7) Scan parquet spark_catalog.default.item
Output [3]: [i_item_sk#6, i_class#7, i_category#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -130,7 +130,7 @@ BroadcastExchange (26)
+- CometScan parquet spark_catalog.default.date_dim (22)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(22) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#5, d_month_seq#21]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt
index a82b90a2a..a4c9f13ce 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q87/explain.txt
@@ -48,7 +48,7 @@
+- ReusedExchange (36)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [2]: [ss_customer_sk#1, ss_sold_date_sk#2]
Batched: true
Location: InMemoryFileIndex []
@@ -76,7 +76,7 @@ Join condition: None
Output [2]: [ss_customer_sk#1, d_date#5]
Input [4]: [ss_customer_sk#1, ss_sold_date_sk#2, d_date_sk#4, d_date#5]

-(unknown) Scan parquet spark_catalog.default.customer
+(7) Scan parquet spark_catalog.default.customer
Output [3]: [c_customer_sk#6, c_first_name#7, c_last_name#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -122,7 +122,7 @@ Functions: []
Aggregate Attributes: []
Results [3]: [c_last_name#8, c_first_name#7, d_date#5]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(16) Scan parquet spark_catalog.default.catalog_sales
Output [2]: [cs_bill_customer_sk#9, cs_sold_date_sk#10]
Batched: true
Location: InMemoryFileIndex []
@@ -191,7 +191,7 @@ Right keys [6]: [coalesce(c_last_name#16, ), isnull(c_last_name#16), coalesce(c_
Join type: LeftAnti
Join condition: None

-(unknown) Scan parquet spark_catalog.default.web_sales
+(30) Scan parquet spark_catalog.default.web_sales
Output [2]: [ws_bill_customer_sk#17, ws_sold_date_sk#18]
Batched: true
Location: InMemoryFileIndex []
@@ -292,7 +292,7 @@ BroadcastExchange (52)
+- CometScan parquet spark_catalog.default.date_dim (48)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(48) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#4, d_date#5, d_month_seq#29]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt
index 26821dfd2..a5f68e564 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q88/explain.txt
@@ -183,7 +183,7 @@
+- ReusedExchange (175)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -201,7 +201,7 @@ Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1
(4) ColumnarToRow [codegen id : 4]
Input [3]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3]

-(unknown) Scan parquet spark_catalog.default.household_demographics
+(5) Scan parquet spark_catalog.default.household_demographics
Output [3]: [hd_demo_sk#5, hd_dep_count#6, hd_vehicle_count#7]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
@@ -233,7 +233,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#1, ss_store_sk#3]
Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(12) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#8, t_hour#9, t_minute#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -265,7 +265,7 @@ Join condition: None
Output [1]: [ss_store_sk#3]
Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#8]

-(unknown) Scan parquet spark_catalog.default.store
+(19) Scan parquet spark_catalog.default.store
Output [2]: [s_store_sk#11, s_store_name#12]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
@@ -315,7 +315,7 @@ Functions [1]: [count(1)]
Aggregate Attributes [1]: [count(1)#15]
Results [1]: [count(1)#15 AS h8_30_to_9#16]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(29) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#17, ss_hdemo_sk#18, ss_store_sk#19, ss_sold_date_sk#20]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -346,7 +346,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#17, ss_store_sk#19]
Input [4]: [ss_sold_time_sk#17, ss_hdemo_sk#18, ss_store_sk#19, hd_demo_sk#21]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(36) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#22, t_hour#23, t_minute#24]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -417,7 +417,7 @@ Arguments: IdentityBroadcastMode, [plan_id=7]
Join type: Inner
Join condition: None

-(unknown) Scan parquet spark_catalog.default.store_sales
+(51) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#30, ss_hdemo_sk#31, ss_store_sk#32, ss_sold_date_sk#33]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -448,7 +448,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#30, ss_store_sk#32]
Input [4]: [ss_sold_time_sk#30, ss_hdemo_sk#31, ss_store_sk#32, hd_demo_sk#34]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(58) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#35, t_hour#36, t_minute#37]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -519,7 +519,7 @@ Arguments: IdentityBroadcastMode, [plan_id=10]
Join type: Inner
Join condition: None

-(unknown) Scan parquet spark_catalog.default.store_sales
+(73) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#43, ss_hdemo_sk#44, ss_store_sk#45, ss_sold_date_sk#46]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -550,7 +550,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#43, ss_store_sk#45]
Input [4]: [ss_sold_time_sk#43, ss_hdemo_sk#44, ss_store_sk#45, hd_demo_sk#47]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(80) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#48, t_hour#49, t_minute#50]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -621,7 +621,7 @@ Arguments: IdentityBroadcastMode, [plan_id=13]
Join type: Inner
Join condition: None

-(unknown) Scan parquet spark_catalog.default.store_sales
+(95) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#56, ss_hdemo_sk#57, ss_store_sk#58, ss_sold_date_sk#59]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -652,7 +652,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#56, ss_store_sk#58]
Input [4]: [ss_sold_time_sk#56, ss_hdemo_sk#57, ss_store_sk#58, hd_demo_sk#60]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(102) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#61, t_hour#62, t_minute#63]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -723,7 +723,7 @@ Arguments: IdentityBroadcastMode, [plan_id=16]
Join type: Inner
Join condition: None

-(unknown) Scan parquet spark_catalog.default.store_sales
+(117) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#69, ss_hdemo_sk#70, ss_store_sk#71, ss_sold_date_sk#72]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -754,7 +754,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#69, ss_store_sk#71]
Input [4]: [ss_sold_time_sk#69, ss_hdemo_sk#70, ss_store_sk#71, hd_demo_sk#73]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(124) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#74, t_hour#75, t_minute#76]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -825,7 +825,7 @@ Arguments: IdentityBroadcastMode, [plan_id=19]
Join type: Inner
Join condition: None

-(unknown) Scan parquet spark_catalog.default.store_sales
+(139) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#82, ss_hdemo_sk#83, ss_store_sk#84, ss_sold_date_sk#85]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -856,7 +856,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#82, ss_store_sk#84]
Input [4]: [ss_sold_time_sk#82, ss_hdemo_sk#83, ss_store_sk#84, hd_demo_sk#86]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(146) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#87, t_hour#88, t_minute#89]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -927,7 +927,7 @@ Arguments: IdentityBroadcastMode, [plan_id=22]
Join type: Inner
Join condition: None

-(unknown) Scan parquet spark_catalog.default.store_sales
+(161) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#95, ss_hdemo_sk#96, ss_store_sk#97, ss_sold_date_sk#98]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -958,7 +958,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#95, ss_store_sk#97]
Input [4]: [ss_sold_time_sk#95, ss_hdemo_sk#96, ss_store_sk#97, hd_demo_sk#99]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(168) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#100, t_hour#101, t_minute#102]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt
index a59560090..3657266e2 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q89/explain.txt
@@ -28,7 +28,7 @@ TakeOrderedAndProject (27)
+- CometScan parquet spark_catalog.default.store (13)

-(unknown) Scan parquet spark_catalog.default.item
+(1) Scan parquet spark_catalog.default.item
Output [4]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -42,7 +42,7 @@ Condition : (((i_category#4 IN (Books
(3) ColumnarToRow [codegen id : 4]
Input [4]: [i_item_sk#1, i_brand#2, i_class#3, i_category#4]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(4) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_item_sk#5, ss_store_sk#6, ss_sales_price#7, ss_sold_date_sk#8]
Batched: true
Location: InMemoryFileIndex []
@@ -84,7 +84,7 @@ Join condition: None
Output [6]: [i_brand#2, i_class#3, i_category#4, ss_store_sk#6, ss_sales_price#7, d_moy#11]
Input [8]: [i_brand#2, i_class#3, i_category#4, ss_store_sk#6, ss_sales_price#7, ss_sold_date_sk#8, d_date_sk#10, d_moy#11]

-(unknown) Scan parquet spark_catalog.default.store
+(13) Scan parquet spark_catalog.default.store
Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
@@ -164,7 +164,7 @@ BroadcastExchange (32)
+- CometScan parquet spark_catalog.default.date_dim (28)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(28) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#10, d_year#21, d_moy#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q9/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q9/explain.txt
index 4fb9fc546..1e97f8a15 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q9/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q9/explain.txt
@@ -5,7 +5,7 @@
+- CometScan parquet spark_catalog.default.reason (1)

-(unknown) Scan parquet spark_catalog.default.reason
+(1) Scan parquet spark_catalog.default.reason
Output [1]: [r_reason_sk#1]
Batched: true
Location [not included in comparison]/{warehouse_dir}/reason]
@@ -36,7 +36,7 @@ Subquery:1 Hosting operator id = 4 Hosting Expression = Subquery scalar-subquery
+- CometScan parquet spark_catalog.default.store_sales (5)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(5) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_quantity#17, ss_ext_discount_amt#18, ss_net_paid#19, ss_sold_date_sk#20]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -89,7 +89,7 @@ Subquery:4 Hosting operator id = 4 Hosting Expression = Subquery scalar-subquery
+- CometScan parquet spark_catalog.default.store_sales (13)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(13) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_quantity#33, ss_ext_discount_amt#34, ss_net_paid#35, ss_sold_date_sk#36]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -142,7 +142,7 @@ Subquery:7 Hosting operator id = 4 Hosting Expression = Subquery scalar-subquery
+- CometScan parquet spark_catalog.default.store_sales (21)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(21) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_quantity#49, ss_ext_discount_amt#50, ss_net_paid#51, ss_sold_date_sk#52]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -195,7 +195,7 @@ Subquery:10 Hosting operator id = 4 Hosting Expression = Subquery scalar-subquer
+- CometScan parquet spark_catalog.default.store_sales (29)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(29) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_quantity#65, ss_ext_discount_amt#66, ss_net_paid#67, ss_sold_date_sk#68]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -248,7 +248,7 @@ Subquery:13 Hosting operator id = 4 Hosting Expression = Subquery scalar-subquer
+- CometScan parquet spark_catalog.default.store_sales (37)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(37) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_quantity#81, ss_ext_discount_amt#82, ss_net_paid#83, ss_sold_date_sk#84]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q90/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q90/explain.txt
index bb9bf128e..fcfbca847 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q90/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q90/explain.txt
@@ -52,7 +52,7 @@
+- ReusedExchange (43)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [4]: [ws_sold_time_sk#1, ws_ship_hdemo_sk#2, ws_web_page_sk#3, ws_sold_date_sk#4]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_sales]
@@ -70,7 +70,7 @@ Arguments: [ws_sold_time_sk#1, ws_ship_hdemo_sk#2, ws_web_page_sk#3], [ws_sold_t
(4) ColumnarToRow [codegen id : 4]
Input [3]: [ws_sold_time_sk#1, ws_ship_hdemo_sk#2, ws_web_page_sk#3]

-(unknown) Scan parquet spark_catalog.default.household_demographics
+(5) Scan parquet spark_catalog.default.household_demographics
Output [2]: [hd_demo_sk#5, hd_dep_count#6]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
@@ -102,7 +102,7 @@ Join condition: None
Output [2]: [ws_sold_time_sk#1, ws_web_page_sk#3]
Input [4]: [ws_sold_time_sk#1, ws_ship_hdemo_sk#2, ws_web_page_sk#3, hd_demo_sk#5]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(12) Scan parquet spark_catalog.default.time_dim
Output [2]: [t_time_sk#7, t_hour#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -134,7 +134,7 @@ Join condition: None
Output [1]: [ws_web_page_sk#3]
Input [3]: [ws_sold_time_sk#1, ws_web_page_sk#3, t_time_sk#7]

-(unknown) Scan parquet spark_catalog.default.web_page
+(19) Scan parquet spark_catalog.default.web_page
Output [2]: [wp_web_page_sk#9, wp_char_count#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_page]
@@ -184,7 +184,7 @@ Functions [1]: [count(1)]
Aggregate Attributes [1]: [count(1)#13]
Results [1]: [count(1)#13 AS amc#14]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(29) Scan parquet spark_catalog.default.web_sales
Output [4]: [ws_sold_time_sk#15, ws_ship_hdemo_sk#16, ws_web_page_sk#17, ws_sold_date_sk#18]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_sales]
@@ -215,7 +215,7 @@ Join condition: None
Output [2]: [ws_sold_time_sk#15, ws_web_page_sk#17]
Input [4]: [ws_sold_time_sk#15, ws_ship_hdemo_sk#16, ws_web_page_sk#17, hd_demo_sk#19]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(36) Scan parquet spark_catalog.default.time_dim
Output [2]: [t_time_sk#20, t_hour#21]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q91/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q91/explain.txt
index c8110434e..61f35489a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q91/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q91/explain.txt
@@ -44,7 +44,7 @@
+- CometScan parquet spark_catalog.default.household_demographics (32)

-(unknown) Scan parquet spark_catalog.default.call_center
+(1) Scan parquet spark_catalog.default.call_center
Output [4]: [cc_call_center_sk#1, cc_call_center_id#2, cc_name#3, cc_manager#4]
Batched: true
Location [not included in comparison]/{warehouse_dir}/call_center]
@@ -58,7 +58,7 @@ Condition : isnotnull(cc_call_center_sk#1)
(3) ColumnarToRow [codegen id : 7]
Input [4]: [cc_call_center_sk#1, cc_call_center_id#2, cc_name#3, cc_manager#4]

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(4) Scan parquet spark_catalog.default.catalog_returns
Output [4]: [cr_returning_customer_sk#5, cr_call_center_sk#6, cr_net_loss#7, cr_returned_date_sk#8]
Batched: true
Location: InMemoryFileIndex []
@@ -100,7 +100,7 @@ Join condition: None
Output [5]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_returning_customer_sk#5, cr_net_loss#7]
Input [7]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_returning_customer_sk#5, cr_net_loss#7, cr_returned_date_sk#8, d_date_sk#10]

-(unknown) Scan parquet spark_catalog.default.customer
+(13) Scan parquet spark_catalog.default.customer
Output [4]: [c_customer_sk#11, c_current_cdemo_sk#12, c_current_hdemo_sk#13, c_current_addr_sk#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -128,7 +128,7 @@ Join condition: None
Output [7]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_net_loss#7, c_current_cdemo_sk#12, c_current_hdemo_sk#13, c_current_addr_sk#14]
Input [9]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_returning_customer_sk#5, cr_net_loss#7, c_customer_sk#11, c_current_cdemo_sk#12, c_current_hdemo_sk#13, c_current_addr_sk#14]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(19) Scan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#15, ca_gmt_offset#16]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -160,7 +160,7 @@ Join condition: None
Output [6]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_net_loss#7, c_current_cdemo_sk#12, c_current_hdemo_sk#13]
Input [8]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_net_loss#7, c_current_cdemo_sk#12, c_current_hdemo_sk#13, c_current_addr_sk#14, ca_address_sk#15]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(26) Scan parquet spark_catalog.default.customer_demographics
Output [3]: [cd_demo_sk#17, cd_marital_status#18, cd_education_status#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -188,7 +188,7 @@ Join condition: None
Output [7]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_net_loss#7, c_current_hdemo_sk#13, cd_marital_status#18, cd_education_status#19]
Input [9]: [cc_call_center_id#2, cc_name#3, cc_manager#4, cr_net_loss#7, c_current_cdemo_sk#12, c_current_hdemo_sk#13, cd_demo_sk#17, cd_marital_status#18, cd_education_status#19]

-(unknown) Scan parquet spark_catalog.default.household_demographics
+(32) Scan parquet spark_catalog.default.household_demographics
Output [2]: [hd_demo_sk#20, hd_buy_potential#21]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
@@ -256,7 +256,7 @@ BroadcastExchange (48)
+- CometScan parquet spark_catalog.default.date_dim (44)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(44) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#10, d_year#29, d_moy#30]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt
index 111c30960..5f1f96168 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q92/explain.txt
@@ -30,7 +30,7 @@
+- ReusedExchange (24)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [3]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3]
Batched: true
Location: InMemoryFileIndex []
@@ -45,7 +45,7 @@ Condition : (isnotnull(ws_item_sk#1) AND isnotnull(ws_ext_discount_amt#2))
(3) ColumnarToRow [codegen id : 6]
Input [3]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
Output [2]: [i_item_sk#5, i_manufact_id#6]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -77,7 +77,7 @@ Join condition: None
Output [3]: [ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5]
Input [4]: [ws_item_sk#1, ws_ext_discount_amt#2, ws_sold_date_sk#3, i_item_sk#5]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(11) Scan parquet spark_catalog.default.web_sales
Output [3]: [ws_item_sk#7, ws_ext_discount_amt#8, ws_sold_date_sk#9]
Batched: true
Location: InMemoryFileIndex []
@@ -182,7 +182,7 @@ BroadcastExchange (34)
+- CometScan parquet spark_catalog.default.date_dim (30)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(30) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#18, d_date#23]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt
index ee17b5291..00ed822f2 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q93/explain.txt
@@ -25,7 +25,7 @@ TakeOrderedAndProject (24)
+- CometScan parquet spark_catalog.default.reason (14)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, ss_sold_date_sk#6]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -46,7 +46,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#3, 5), ENSURE_REQUIRE
Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5]
Arguments: [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#3 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.store_returns
+(6) Scan parquet spark_catalog.default.store_returns
Output [5]: [sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10, sr_returned_date_sk#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_returns]
@@ -82,7 +82,7 @@ Join condition: None
Output [5]: [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, sr_return_quantity#10]
Input [9]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_sales_price#5, sr_item_sk#7, sr_reason_sk#8, sr_ticket_number#9, sr_return_quantity#10]

-(unknown) Scan parquet spark_catalog.default.reason
+(14) Scan parquet spark_catalog.default.reason
Output [2]: [r_reason_sk#12, r_reason_desc#13]
Batched: true
Location [not included in comparison]/{warehouse_dir}/reason]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt
index 1ed6d0cb3..d71f96e15 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q94/explain.txt
@@ -46,7 +46,7 @@
+- CometScan parquet spark_catalog.default.web_site (34)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [8]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ws_sold_date_sk#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_sales]
@@ -72,7 +72,7 @@ Arguments: hashpartitioning(ws_order_number#5, 5), ENSURE_REQUIREMENTS, [plan_id
Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7]
Arguments: [ws_order_number#5 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.web_sales
+(7) Scan parquet spark_catalog.default.web_sales
Output [3]: [ws_warehouse_sk#9, ws_order_number#10, ws_sold_date_sk#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_sales]
@@ -103,7 +103,7 @@ Join condition: NOT (ws_warehouse_sk#4 = ws_warehouse_sk#9)
Output [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7]
Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_warehouse_sk#4, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7]

-(unknown) Scan parquet spark_catalog.default.web_returns
+(14) Scan parquet spark_catalog.default.web_returns
Output [2]: [wr_order_number#12, wr_returned_date_sk#13]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_returns]
@@ -130,7 +130,7 @@ Right keys [1]: [wr_order_number#12]
Join type: LeftAnti
Join condition: None

-(unknown) Scan parquet spark_catalog.default.date_dim
+(20) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#14, d_date#15]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -162,7 +162,7 @@ Join condition: None
Output [5]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7]
Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, d_date_sk#14]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(27) Scan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#16, ca_state#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -194,7 +194,7 @@ Join condition: None
Output [4]: [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7]
Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_profit#7, ca_address_sk#16]

-(unknown) Scan parquet spark_catalog.default.web_site
+(34) Scan parquet spark_catalog.default.web_site
Output [2]: [web_site_sk#18, web_company_name#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_site]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt
index d8686b2b6..fd5f78427 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q95/explain.txt
@@ -59,7 +59,7 @@
+- CometScan parquet spark_catalog.default.web_site (47)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ws_sold_date_sk#7]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_sales]
@@ -85,7 +85,7 @@ Arguments: hashpartitioning(ws_order_number#4, 5), ENSURE_REQUIREMENTS, [plan_id
Input [6]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6]
Arguments: [ws_order_number#4 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.web_sales
+(7) Scan parquet spark_catalog.default.web_sales
Output [3]: [ws_warehouse_sk#8, ws_order_number#9, ws_sold_date_sk#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_sales]
@@ -134,7 +134,7 @@ Right keys [1]: [ws_order_number#9]
Join type: LeftSemi
Join condition: None

-(unknown) Scan parquet spark_catalog.default.web_returns
+(18) Scan parquet spark_catalog.default.web_returns
Output [2]: [wr_order_number#13, wr_returned_date_sk#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_returns]
@@ -200,7 +200,7 @@ Right keys [1]: [wr_order_number#13]
Join type: LeftSemi
Join condition: None

-(unknown) Scan parquet spark_catalog.default.date_dim
+(33) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#15, d_date#16]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -232,7 +232,7 @@ Join condition: None
Output [5]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6]
Input [7]: [ws_ship_date_sk#1, ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, d_date_sk#15]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(40) Scan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#17, ca_state#18]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -264,7 +264,7 @@ Join condition: None
Output [4]: [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6]
Input [6]: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_profit#6, ca_address_sk#17]

-(unknown) Scan parquet spark_catalog.default.web_site
+(47) Scan parquet spark_catalog.default.web_site
Output [2]: [web_site_sk#19, web_company_name#20]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_site]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt
index b55971ac8..60c262e9c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q96/explain.txt
@@ -29,7 +29,7 @@
+- CometScan parquet spark_catalog.default.store (19)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_sold_date_sk#4]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store_sales]
@@ -47,7 +47,7 @@ Arguments: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3], [ss_sold_time_sk#1
(4) ColumnarToRow [codegen id : 4]
Input [3]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3]

-(unknown) Scan parquet spark_catalog.default.household_demographics
+(5) Scan parquet spark_catalog.default.household_demographics
Output [2]: [hd_demo_sk#5, hd_dep_count#6]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
@@ -79,7 +79,7 @@ Join condition: None
Output [2]: [ss_sold_time_sk#1, ss_store_sk#3]
Input [4]: [ss_sold_time_sk#1, ss_hdemo_sk#2, ss_store_sk#3, hd_demo_sk#5]

-(unknown) Scan parquet spark_catalog.default.time_dim
+(12) Scan parquet spark_catalog.default.time_dim
Output [3]: [t_time_sk#7, t_hour#8, t_minute#9]
Batched: true
Location [not included in comparison]/{warehouse_dir}/time_dim]
@@ -111,7 +111,7 @@ Join condition: None
Output [1]: [ss_store_sk#3]
Input [3]: [ss_sold_time_sk#1, ss_store_sk#3, t_time_sk#7]

-(unknown) Scan parquet spark_catalog.default.store
+(19) Scan parquet spark_catalog.default.store
Output [2]: [s_store_sk#10, s_store_name#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt
index 512037f6a..66ccf4f22 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q97/explain.txt
@@ -24,7 +24,7 @@
+- ReusedExchange (12)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [3]: [ss_item_sk#1, ss_customer_sk#2, ss_sold_date_sk#3]
Batched: true
Location: InMemoryFileIndex []
@@ -69,7 +69,7 @@ Results [2]: [ss_customer_sk#2 AS customer_sk#6, ss_item_sk#1 AS item_sk#7]
Input [2]: [customer_sk#6, item_sk#7]
Arguments: [customer_sk#6 ASC NULLS FIRST, item_sk#7 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(10) Scan parquet spark_catalog.default.catalog_sales
Output [3]: [cs_bill_customer_sk#8, cs_item_sk#9, cs_sold_date_sk#10]
Batched: true
Location: InMemoryFileIndex []
@@ -152,7 +152,7 @@ BroadcastExchange (28)
+- CometScan parquet spark_catalog.default.date_dim (24)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(24) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#5, d_month_seq#27]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt
index b69f69081..3d66a07d0 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q98/explain.txt
@@ -23,7 +23,7 @@
+- ReusedExchange (10)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3]
Batched: true
Location: InMemoryFileIndex []
@@ -38,7 +38,7 @@ Condition : isnotnull(ss_item_sk#1)
(3) ColumnarToRow [codegen id : 3]
Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -135,7 +135,7 @@ BroadcastExchange (27)
+- CometScan parquet spark_catalog.default.date_dim (23)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(23) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#11, d_date#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt
index 8420e644c..936555026 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4/q99/explain.txt
@@ -33,7 +33,7 @@ TakeOrderedAndProject (32)
+- CometScan parquet spark_catalog.default.date_dim (22)

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(1) Scan parquet spark_catalog.default.catalog_sales
Output [5]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_warehouse_sk#4, cs_sold_date_sk#5]
Batched: true
Location [not included in comparison]/{warehouse_dir}/catalog_sales]
@@ -47,7 +47,7 @@ Condition : (((isnotnull(cs_warehouse_sk#4) AND isnotnull(cs_ship_mode_sk#3)) AN
(3) ColumnarToRow [codegen id : 5]
Input [5]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_warehouse_sk#4, cs_sold_date_sk#5]

-(unknown) Scan parquet spark_catalog.default.warehouse
+(4) Scan parquet spark_catalog.default.warehouse
Output [2]: [w_warehouse_sk#6, w_warehouse_name#7]
Batched: true
Location [not included in comparison]/{warehouse_dir}/warehouse]
@@ -75,7 +75,7 @@ Join condition: None
Output [5]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_sold_date_sk#5, w_warehouse_name#7]
Input [7]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_warehouse_sk#4, cs_sold_date_sk#5, w_warehouse_sk#6, w_warehouse_name#7]

-(unknown) Scan parquet spark_catalog.default.ship_mode
+(10) Scan parquet spark_catalog.default.ship_mode
Output [2]: [sm_ship_mode_sk#8, sm_type#9]
Batched: true
Location [not included in comparison]/{warehouse_dir}/ship_mode]
@@ -103,7 +103,7 @@ Join condition: None
Output [5]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_sold_date_sk#5, w_warehouse_name#7, sm_type#9]
Input [7]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_ship_mode_sk#3, cs_sold_date_sk#5, w_warehouse_name#7, sm_ship_mode_sk#8, sm_type#9]

-(unknown) Scan parquet spark_catalog.default.call_center
+(16) Scan parquet spark_catalog.default.call_center
Output [2]: [cc_call_center_sk#10, cc_name#11]
Batched: true
Location [not included in comparison]/{warehouse_dir}/call_center]
@@ -131,7 +131,7 @@ Join condition: None
Output [5]: [cs_ship_date_sk#1, cs_sold_date_sk#5, w_warehouse_name#7, sm_type#9, cc_name#11]
Input [7]: [cs_ship_date_sk#1, cs_call_center_sk#2, cs_sold_date_sk#5, w_warehouse_name#7, sm_type#9, cc_call_center_sk#10, cc_name#11]

-(unknown) Scan parquet spark_catalog.default.date_dim
+(22) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#12, d_month_seq#13]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt
index ea5dac96e..be0e98db2 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q10a/explain.txt
@@ -42,7 +42,7 @@ TakeOrderedAndProject (41)
+- CometScan parquet spark_catalog.default.customer_demographics (32)

-(unknown) Scan parquet spark_catalog.default.customer
+(1) Scan parquet spark_catalog.default.customer
Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -56,7 +56,7 @@ Condition : (isnotnull(c_current_addr_sk#3) AND isnotnull(c_current_cdemo_sk#2))
(3) ColumnarToRow [codegen id : 9]
Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(4) Scan parquet spark_catalog.default.store_sales
Output [2]: [ss_customer_sk#4, ss_sold_date_sk#5]
Batched: true
Location: InMemoryFileIndex []
@@ -89,7 +89,7 @@ Right keys [1]: [ss_customer_sk#4]
Join type: LeftSemi
Join condition: None

-(unknown) Scan parquet spark_catalog.default.web_sales
+(11) Scan parquet spark_catalog.default.web_sales
Output [2]: [ws_bill_customer_sk#8, ws_sold_date_sk#9]
Batched: true
Location: InMemoryFileIndex []
@@ -112,7 +112,7 @@ Join condition: None
Output [1]: [ws_bill_customer_sk#8 AS customer_sk#12]
Input [3]: [ws_bill_customer_sk#8, ws_sold_date_sk#9, d_date_sk#11]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(16) Scan parquet spark_catalog.default.catalog_sales
Output [2]: [cs_ship_customer_sk#13, cs_sold_date_sk#14]
Batched: true
Location: InMemoryFileIndex []
@@ -151,7 +151,7 @@ Join condition: None
Output [2]: [c_current_cdemo_sk#2, c_current_addr_sk#3]
Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3]

-(unknown) Scan parquet spark_catalog.default.customer_address
+(25) Scan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#18, ca_county#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
@@ -183,7 +183,7 @@ Join condition: None
Output [1]: [c_current_cdemo_sk#2]
Input [3]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#18]

-(unknown) Scan parquet spark_catalog.default.customer_demographics
+(32) Scan parquet spark_catalog.default.customer_demographics
Output [9]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_education_status#23, cd_purchase_estimate#24, cd_credit_rating#25, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
@@ -243,7 +243,7 @@ BroadcastExchange (46)
+- CometScan parquet spark_catalog.default.date_dim (42)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(42) Scan parquet spark_catalog.default.date_dim
Output [3]: [d_date_sk#7, d_year#38, d_moy#39]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt
index befc87707..daa1f5243 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q11/explain.txt
@@ -72,7 +72,7 @@ TakeOrderedAndProject (71)
+- ReusedExchange (62)

-(unknown) Scan parquet spark_catalog.default.customer
+(1) Scan parquet spark_catalog.default.customer
Output [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -86,7 +86,7 @@ Condition : (isnotnull(c_customer_sk#1) AND isnotnull(c_customer_id#2))
(3) ColumnarToRow [codegen id : 3]
Input [8]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4, c_preferred_cust_flag#5, c_birth_country#6, c_login#7, c_email_address#8]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(4) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_customer_sk#9, ss_ext_discount_amt#10, ss_ext_list_price#11, ss_sold_date_sk#12]
Batched: true
Location: InMemoryFileIndex []
@@ -150,7 +150,7 @@ Results [2]: [c_customer_id#2 AS customer_id#19, MakeDecimal(sum(UnscaledValue((
Input [2]: [customer_id#19, year_total#20]
Condition : (isnotnull(year_total#20) AND (year_total#20 > 0.00))

-(unknown) Scan parquet spark_catalog.default.customer
+(17) Scan parquet spark_catalog.default.customer
Output [8]: [c_customer_sk#21, c_customer_id#22, c_first_name#23, c_last_name#24, c_preferred_cust_flag#25, c_birth_country#26, c_login#27, c_email_address#28]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -164,7 +164,7 @@ Condition : (isnotnull(c_customer_sk#21) AND isnotnull(c_customer_id#22))
(19) ColumnarToRow [codegen id : 6]
Input [8]: [c_customer_sk#21, c_customer_id#22, c_first_name#23, c_last_name#24, c_preferred_cust_flag#25, c_birth_country#26, c_login#27, c_email_address#28]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(20) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_customer_sk#29, ss_ext_discount_amt#30, ss_ext_list_price#31, ss_sold_date_sk#32]
Batched: true
Location: InMemoryFileIndex []
@@ -234,7 +234,7 @@ Right keys [1]: [customer_id#38]
Join type: Inner
Join condition: None

-(unknown) Scan parquet spark_catalog.default.customer
+(34) Scan parquet spark_catalog.default.customer
Output [8]: [c_customer_sk#43, c_customer_id#44, c_first_name#45, c_last_name#46, c_preferred_cust_flag#47, c_birth_country#48, c_login#49, c_email_address#50]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -248,7 +248,7 @@ Condition : (isnotnull(c_customer_sk#43) AND isnotnull(c_customer_id#44))
(36) ColumnarToRow [codegen id : 10]
Input [8]: [c_customer_sk#43, c_customer_id#44, c_first_name#45, c_last_name#46, c_preferred_cust_flag#47, c_birth_country#48, c_login#49, c_email_address#50]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(37) Scan parquet spark_catalog.default.web_sales
Output [4]: [ws_bill_customer_sk#51, ws_ext_discount_amt#52, ws_ext_list_price#53, ws_sold_date_sk#54]
Batched: true
Location: InMemoryFileIndex []
@@ -326,7 +326,7 @@ Join condition: None
Output [8]: [customer_id#19, year_total#20, customer_id#38, customer_first_name#39, customer_last_name#40, customer_email_address#41, year_total#42, year_total#62]
Input [9]: [customer_id#19, year_total#20, customer_id#38, customer_first_name#39, customer_last_name#40, customer_email_address#41, year_total#42, customer_id#61, year_total#62]

-(unknown) Scan parquet spark_catalog.default.customer
+(53) Scan parquet spark_catalog.default.customer
Output [8]: [c_customer_sk#63, c_customer_id#64, c_first_name#65, c_last_name#66, c_preferred_cust_flag#67, c_birth_country#68, c_login#69, c_email_address#70]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
@@ -340,7 +340,7 @@ Condition : (isnotnull(c_customer_sk#63) AND isnotnull(c_customer_id#64))
(55) ColumnarToRow [codegen id : 14]
Input [8]: [c_customer_sk#63, c_customer_id#64, c_first_name#65, c_last_name#66, c_preferred_cust_flag#67, c_birth_country#68, c_login#69, c_email_address#70]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(56) Scan parquet spark_catalog.default.web_sales
Output [4]: [ws_bill_customer_sk#71, ws_ext_discount_amt#72, ws_ext_list_price#73, ws_sold_date_sk#74]
Batched: true
Location: InMemoryFileIndex []
@@ -427,7 +427,7 @@ BroadcastExchange (75)
+- CometScan parquet spark_catalog.default.date_dim (72)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(72) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#14, d_year#15]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -452,7 +452,7 @@ BroadcastExchange (79)
+- CometScan parquet spark_catalog.default.date_dim (76)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(76) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#34, d_year#35]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt
index f3c5c4609..c39a71879 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q12/explain.txt
@@ -21,7 +21,7 @@ TakeOrderedAndProject (20)
+- ReusedExchange (10)

-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
Output [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3]
Batched: true
Location: InMemoryFileIndex []
@@ -36,7 +36,7 @@ Condition : isnotnull(ws_item_sk#1)
(3) ColumnarToRow [codegen id : 3]
Input [3]: [ws_item_sk#1, ws_ext_sales_price#2, ws_sold_date_sk#3]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
@@ -125,7 +125,7 @@ BroadcastExchange (25)
+- CometScan parquet spark_catalog.default.date_dim (21)

-(unknown) Scan parquet spark_catalog.default.date_dim
+(21) Scan parquet spark_catalog.default.date_dim
Output [2]: [d_date_sk#11, d_date#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt
index 6fdb365c5..1e7dfdbdf 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14/explain.txt
@@ -85,7 +85,7 @@ TakeOrderedAndProject (84)
+- ReusedExchange (75)

-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4]
Batched: true
InMemoryFileIndex [] @@ -100,7 +100,7 @@ Condition : isnotnull(ss_item_sk#1) (3) ColumnarToRow [codegen id : 25] Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -114,7 +114,7 @@ Condition : ((isnotnull(i_brand_id#7) AND isnotnull(i_class_id#8)) AND isnotnull (6) ColumnarToRow [codegen id : 11] Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] -(unknown) Scan parquet spark_catalog.default.store_sales +(7) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#10, ss_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -129,7 +129,7 @@ Condition : isnotnull(ss_item_sk#10) (9) ColumnarToRow [codegen id : 6] Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] -(unknown) Scan parquet spark_catalog.default.item +(10) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -143,7 +143,7 @@ Condition : (((isnotnull(i_item_sk#13) AND isnotnull(i_brand_id#14)) AND isnotnu (12) ColumnarToRow [codegen id : 4] Input [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(13) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_item_sk#17, cs_sold_date_sk#18] Batched: true Location: InMemoryFileIndex [] @@ -158,7 +158,7 @@ Condition : isnotnull(cs_item_sk#17) (15) ColumnarToRow [codegen id : 3] Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] -(unknown) Scan parquet spark_catalog.default.item +(16) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -254,7 +254,7 @@ Functions: [] Aggregate Attributes: [] Results [3]: [brand_id#26, class_id#27, category_id#28] -(unknown) Scan parquet spark_catalog.default.web_sales +(36) Scan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#29, ws_sold_date_sk#30] Batched: true Location: InMemoryFileIndex [] @@ -329,7 +329,7 @@ Right keys [1]: [ss_item_sk#37] Join type: LeftSemi Join condition: None -(unknown) Scan parquet spark_catalog.default.item +(52) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#38, i_brand_id#39, i_class_id#40, i_category_id#41] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -401,7 +401,7 @@ Results [6]: [store AS channel#51, i_brand_id#39, i_class_id#40, i_category_id#4 Input [6]: [channel#51, i_brand_id#39, i_class_id#40, i_category_id#41, sales#52, number_sales#53] Condition : (isnotnull(sales#52) AND (cast(sales#52 as decimal(32,6)) > cast(Subquery scalar-subquery#54, [id=#55] as decimal(32,6)))) -(unknown) Scan parquet spark_catalog.default.store_sales +(67) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#56, ss_quantity#57, ss_list_price#58, ss_sold_date_sk#59] Batched: true Location: InMemoryFileIndex [] @@ -511,7 +511,7 @@ Subquery:1 Hosting operator id = 66 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (97) -(unknown) Scan parquet spark_catalog.default.store_sales +(85) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#77, ss_list_price#78, 
ss_sold_date_sk#79] Batched: true Location: InMemoryFileIndex [] @@ -534,7 +534,7 @@ Join condition: None Output [2]: [ss_quantity#77 AS quantity#82, ss_list_price#78 AS list_price#83] Input [4]: [ss_quantity#77, ss_list_price#78, ss_sold_date_sk#79, d_date_sk#81] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(90) Scan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#84, cs_list_price#85, cs_sold_date_sk#86] Batched: true Location: InMemoryFileIndex [] @@ -557,7 +557,7 @@ Join condition: None Output [2]: [cs_quantity#84 AS quantity#89, cs_list_price#85 AS list_price#90] Input [4]: [cs_quantity#84, cs_list_price#85, cs_sold_date_sk#86, d_date_sk#88] -(unknown) Scan parquet spark_catalog.default.web_sales +(95) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#91, ws_list_price#92, ws_sold_date_sk#93] Batched: true Location: InMemoryFileIndex [] @@ -614,7 +614,7 @@ BroadcastExchange (108) +- CometScan parquet spark_catalog.default.date_dim (104) -(unknown) Scan parquet spark_catalog.default.date_dim +(104) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#42, d_week_seq#104] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -643,7 +643,7 @@ Subquery:6 Hosting operator id = 105 Hosting Expression = Subquery scalar-subque +- CometScan parquet spark_catalog.default.date_dim (109) -(unknown) Scan parquet spark_catalog.default.date_dim +(109) Scan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#107, d_year#108, d_moy#109, d_dom#110] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -669,7 +669,7 @@ BroadcastExchange (117) +- CometScan parquet spark_catalog.default.date_dim (113) -(unknown) Scan parquet spark_catalog.default.date_dim +(113) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#25, d_year#111] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -705,7 +705,7 @@ BroadcastExchange (122) +- CometScan parquet spark_catalog.default.date_dim (118) -(unknown) Scan parquet spark_catalog.default.date_dim +(118) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#65, d_week_seq#112] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -734,7 +734,7 @@ Subquery:12 Hosting operator id = 119 Hosting Expression = Subquery scalar-subqu +- CometScan parquet spark_catalog.default.date_dim (123) -(unknown) Scan parquet spark_catalog.default.date_dim +(123) Scan parquet spark_catalog.default.date_dim Output [4]: [d_week_seq#115, d_year#116, d_moy#117, d_dom#118] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt index a8db177f8..88d8caaa1 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q14a/explain.txt @@ -126,7 +126,7 @@ TakeOrderedAndProject (125) +- ReusedExchange (116) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, ss_sold_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -141,7 +141,7 @@ Condition : isnotnull(ss_item_sk#1) (3) ColumnarToRow [codegen id : 25] Input [4]: [ss_item_sk#1, ss_quantity#2, ss_list_price#3, 
ss_sold_date_sk#4] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -155,7 +155,7 @@ Condition : ((isnotnull(i_brand_id#7) AND isnotnull(i_class_id#8)) AND isnotnull (6) ColumnarToRow [codegen id : 11] Input [4]: [i_item_sk#6, i_brand_id#7, i_class_id#8, i_category_id#9] -(unknown) Scan parquet spark_catalog.default.store_sales +(7) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_item_sk#10, ss_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -170,7 +170,7 @@ Condition : isnotnull(ss_item_sk#10) (9) ColumnarToRow [codegen id : 6] Input [2]: [ss_item_sk#10, ss_sold_date_sk#11] -(unknown) Scan parquet spark_catalog.default.item +(10) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -184,7 +184,7 @@ Condition : (((isnotnull(i_item_sk#13) AND isnotnull(i_brand_id#14)) AND isnotnu (12) ColumnarToRow [codegen id : 4] Input [4]: [i_item_sk#13, i_brand_id#14, i_class_id#15, i_category_id#16] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(13) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_item_sk#17, cs_sold_date_sk#18] Batched: true Location: InMemoryFileIndex [] @@ -199,7 +199,7 @@ Condition : isnotnull(cs_item_sk#17) (15) ColumnarToRow [codegen id : 3] Input [2]: [cs_item_sk#17, cs_sold_date_sk#18] -(unknown) Scan parquet spark_catalog.default.item +(16) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#20, i_brand_id#21, i_class_id#22, i_category_id#23] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -295,7 +295,7 @@ Functions: [] Aggregate Attributes: [] Results [3]: [brand_id#26, class_id#27, category_id#28] -(unknown) Scan parquet spark_catalog.default.web_sales +(36) Scan parquet spark_catalog.default.web_sales Output [2]: [ws_item_sk#29, ws_sold_date_sk#30] Batched: true Location: InMemoryFileIndex [] @@ -370,7 +370,7 @@ Right keys [1]: [ss_item_sk#37] Join type: LeftSemi Join condition: None -(unknown) Scan parquet spark_catalog.default.item +(52) Scan parquet spark_catalog.default.item Output [4]: [i_item_sk#38, i_brand_id#39, i_class_id#40, i_category_id#41] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -442,7 +442,7 @@ Results [6]: [store AS channel#51, i_brand_id#39, i_class_id#40, i_category_id#4 Input [6]: [channel#51, i_brand_id#39, i_class_id#40, i_category_id#41, sales#52, number_sales#53] Condition : (isnotnull(sales#52) AND (cast(sales#52 as decimal(32,6)) > cast(Subquery scalar-subquery#54, [id=#55] as decimal(32,6)))) -(unknown) Scan parquet spark_catalog.default.catalog_sales +(67) Scan parquet spark_catalog.default.catalog_sales Output [4]: [cs_item_sk#56, cs_quantity#57, cs_list_price#58, cs_sold_date_sk#59] Batched: true Location: InMemoryFileIndex [] @@ -514,7 +514,7 @@ Results [6]: [catalog AS channel#74, i_brand_id#62, i_class_id#63, i_category_id Input [6]: [channel#74, i_brand_id#62, i_class_id#63, i_category_id#64, sales#75, number_sales#76] Condition : (isnotnull(sales#75) AND (cast(sales#75 as decimal(32,6)) > cast(ReusedSubquery Subquery scalar-subquery#54, [id=#55] as decimal(32,6)))) -(unknown) Scan parquet spark_catalog.default.web_sales +(82) Scan parquet spark_catalog.default.web_sales Output 
[4]: [ws_item_sk#77, ws_quantity#78, ws_list_price#79, ws_sold_date_sk#80] Batched: true Location: InMemoryFileIndex [] @@ -766,7 +766,7 @@ Subquery:1 Hosting operator id = 66 Hosting Expression = Subquery scalar-subquer +- ReusedExchange (138) -(unknown) Scan parquet spark_catalog.default.store_sales +(126) Scan parquet spark_catalog.default.store_sales Output [3]: [ss_quantity#158, ss_list_price#159, ss_sold_date_sk#160] Batched: true Location: InMemoryFileIndex [] @@ -789,7 +789,7 @@ Join condition: None Output [2]: [ss_quantity#158 AS quantity#163, ss_list_price#159 AS list_price#164] Input [4]: [ss_quantity#158, ss_list_price#159, ss_sold_date_sk#160, d_date_sk#162] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(131) Scan parquet spark_catalog.default.catalog_sales Output [3]: [cs_quantity#165, cs_list_price#166, cs_sold_date_sk#167] Batched: true Location: InMemoryFileIndex [] @@ -812,7 +812,7 @@ Join condition: None Output [2]: [cs_quantity#165 AS quantity#170, cs_list_price#166 AS list_price#171] Input [4]: [cs_quantity#165, cs_list_price#166, cs_sold_date_sk#167, d_date_sk#169] -(unknown) Scan parquet spark_catalog.default.web_sales +(136) Scan parquet spark_catalog.default.web_sales Output [3]: [ws_quantity#172, ws_list_price#173, ws_sold_date_sk#174] Batched: true Location: InMemoryFileIndex [] @@ -865,7 +865,7 @@ BroadcastExchange (149) +- CometScan parquet spark_catalog.default.date_dim (145) -(unknown) Scan parquet spark_catalog.default.date_dim +(145) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#169, d_year#185] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -897,7 +897,7 @@ BroadcastExchange (154) +- CometScan parquet spark_catalog.default.date_dim (150) -(unknown) Scan parquet spark_catalog.default.date_dim +(150) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#42, d_year#186, d_moy#187] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] @@ -927,7 +927,7 @@ BroadcastExchange (159) +- CometScan parquet spark_catalog.default.date_dim (155) -(unknown) Scan parquet spark_catalog.default.date_dim +(155) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#25, d_year#188] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt index 4d23b269c..6bd645986 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q18a/explain.txt @@ -154,7 +154,7 @@ TakeOrderedAndProject (153) +- CometScan parquet spark_catalog.default.item (143) -(unknown) Scan parquet spark_catalog.default.catalog_sales +(1) Scan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -169,7 +169,7 @@ Condition : ((isnotnull(cs_bill_cdemo_sk#2) AND isnotnull(cs_bill_customer_sk#1) (3) ColumnarToRow [codegen id : 7] Input [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(4) Scan parquet 
spark_catalog.default.customer_demographics Output [4]: [cd_demo_sk#11, cd_gender#12, cd_education_status#13, cd_dep_count#14] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -201,7 +201,7 @@ Join condition: None Output [9]: [cs_bill_customer_sk#1, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14] Input [11]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_demo_sk#11, cd_dep_count#14] -(unknown) Scan parquet spark_catalog.default.customer +(11) Scan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#15, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_month#18, c_birth_year#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -233,7 +233,7 @@ Join condition: None Output [11]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19] Input [13]: [cs_bill_customer_sk#1, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_customer_sk#15, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(18) Scan parquet spark_catalog.default.customer_demographics Output [1]: [cd_demo_sk#20] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -261,7 +261,7 @@ Join condition: None Output [10]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_addr_sk#17, c_birth_year#19] Input [12]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19, cd_demo_sk#20] -(unknown) Scan parquet spark_catalog.default.customer_address +(24) Scan parquet spark_catalog.default.customer_address Output [4]: [ca_address_sk#21, ca_county#22, ca_state#23, ca_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -302,7 +302,7 @@ Join condition: None Output [11]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cd_dep_count#14, c_birth_year#19, ca_county#22, ca_state#23, ca_country#24] Input [13]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_birth_year#19, ca_county#22, ca_state#23, ca_country#24, d_date_sk#25] -(unknown) Scan parquet spark_catalog.default.item +(33) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#26, i_item_id#27] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -348,7 +348,7 @@ Functions [7]: [avg(agg1#28), avg(agg2#29), avg(agg3#30), avg(agg4#31), avg(agg5 Aggregate Attributes [7]: [avg(agg1#28)#63, avg(agg2#29)#64, avg(agg3#30)#65, avg(agg4#31)#66, avg(agg5#32)#67, avg(agg6#33)#68, avg(agg7#34)#69] Results [11]: [i_item_id#27, ca_country#24, ca_state#23, ca_county#22, avg(agg1#28)#63 AS agg1#70, avg(agg2#29)#64 AS agg2#71, avg(agg3#30)#65 AS agg3#72, avg(agg4#31)#66 AS agg4#73, avg(agg5#32)#67 AS agg5#74, avg(agg6#33)#68 AS 
agg6#75, avg(agg7#34)#69 AS agg7#76] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(42) Scan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -402,7 +402,7 @@ Join condition: None Output [10]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_addr_sk#17, c_birth_year#19] Input [12]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19, cd_demo_sk#20] -(unknown) Scan parquet spark_catalog.default.customer_address +(54) Scan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#21, ca_state#23, ca_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -474,7 +474,7 @@ Functions [7]: [avg(agg1#28), avg(agg2#29), avg(agg3#30), avg(agg4#31), avg(agg5 Aggregate Attributes [7]: [avg(agg1#28)#106, avg(agg2#29)#107, avg(agg3#30)#108, avg(agg4#31)#109, avg(agg5#32)#110, avg(agg6#33)#111, avg(agg7#34)#112] Results [11]: [i_item_id#27, ca_country#24, ca_state#23, null AS county#113, avg(agg1#28)#106 AS agg1#114, avg(agg2#29)#107 AS agg2#115, avg(agg3#30)#108 AS agg3#116, avg(agg4#31)#109 AS agg4#117, avg(agg5#32)#110 AS agg5#118, avg(agg6#33)#111 AS agg6#119, avg(agg7#34)#112 AS agg7#120] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(69) Scan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -528,7 +528,7 @@ Join condition: None Output [10]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_addr_sk#17, c_birth_year#19] Input [12]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19, cd_demo_sk#20] -(unknown) Scan parquet spark_catalog.default.customer_address +(81) Scan parquet spark_catalog.default.customer_address Output [3]: [ca_address_sk#21, ca_state#23, ca_country#24] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -604,7 +604,7 @@ Functions [7]: [avg(agg1#28), avg(agg2#29), avg(agg3#30), avg(agg4#31), avg(agg5 Aggregate Attributes [7]: [avg(agg1#28)#150, avg(agg2#29)#151, avg(agg3#30)#152, avg(agg4#31)#153, avg(agg5#32)#154, avg(agg6#33)#155, avg(agg7#34)#156] Results [11]: [i_item_id#27, ca_country#24, null AS ca_state#157, null AS county#158, avg(agg1#28)#150 AS agg1#159, avg(agg2#29)#151 AS agg2#160, avg(agg3#30)#152 AS agg3#161, avg(agg4#31)#153 AS agg4#162, avg(agg5#32)#154 AS agg5#163, avg(agg6#33)#155 AS agg6#164, avg(agg7#34)#156 AS agg7#165] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(97) Scan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: 
InMemoryFileIndex [] @@ -658,7 +658,7 @@ Join condition: None Output [10]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_addr_sk#17, c_birth_year#19] Input [12]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_current_cdemo_sk#16, c_current_addr_sk#17, c_birth_year#19, cd_demo_sk#20] -(unknown) Scan parquet spark_catalog.default.customer_address +(109) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#21, ca_state#23] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -734,7 +734,7 @@ Functions [7]: [avg(agg1#28), avg(agg2#29), avg(agg3#30), avg(agg4#31), avg(agg5 Aggregate Attributes [7]: [avg(agg1#28)#195, avg(agg2#29)#196, avg(agg3#30)#197, avg(agg4#31)#198, avg(agg5#32)#199, avg(agg6#33)#200, avg(agg7#34)#201] Results [11]: [i_item_id#27, null AS ca_country#202, null AS ca_state#203, null AS county#204, avg(agg1#28)#195 AS agg1#205, avg(agg2#29)#196 AS agg2#206, avg(agg3#30)#197 AS agg3#207, avg(agg4#31)#198 AS agg4#208, avg(agg5#32)#199 AS agg5#209, avg(agg6#33)#200 AS agg6#210, avg(agg7#34)#201 AS agg7#211] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(125) Scan parquet spark_catalog.default.catalog_sales Output [9]: [cs_bill_customer_sk#1, cs_bill_cdemo_sk#2, cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -814,7 +814,7 @@ Join condition: None Output [8]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cd_dep_count#14, c_birth_year#19] Input [10]: [cs_item_sk#3, cs_quantity#4, cs_list_price#5, cs_sales_price#6, cs_coupon_amt#7, cs_net_profit#8, cs_sold_date_sk#9, cd_dep_count#14, c_birth_year#19, d_date_sk#25] -(unknown) Scan parquet spark_catalog.default.item +(143) Scan parquet spark_catalog.default.item Output [1]: [i_item_sk#26] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -876,7 +876,7 @@ BroadcastExchange (158) +- CometScan parquet spark_catalog.default.date_dim (154) -(unknown) Scan parquet spark_catalog.default.date_dim +(154) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#25, d_year#259] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt index ad52796ed..1304af2e1 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q20/explain.txt @@ -21,7 +21,7 @@ TakeOrderedAndProject (20) +- ReusedExchange (10) -(unknown) Scan parquet spark_catalog.default.catalog_sales +(1) Scan parquet spark_catalog.default.catalog_sales Output [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -36,7 +36,7 @@ Condition : isnotnull(cs_item_sk#1) (3) ColumnarToRow [codegen id : 3] Input [3]: [cs_item_sk#1, cs_ext_sales_price#2, cs_sold_date_sk#3] -(unknown) Scan parquet spark_catalog.default.item +(4) Scan parquet spark_catalog.default.item Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10] Batched: true 
Location [not included in comparison]/{warehouse_dir}/item] @@ -125,7 +125,7 @@ BroadcastExchange (25) +- CometScan parquet spark_catalog.default.date_dim (21) -(unknown) Scan parquet spark_catalog.default.date_dim +(21) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#11, d_date#19] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt index bdfd6eee0..fdebdc8a4 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22/explain.txt @@ -23,7 +23,7 @@ TakeOrderedAndProject (22) +- CometScan parquet spark_catalog.default.warehouse (13) -(unknown) Scan parquet spark_catalog.default.inventory +(1) Scan parquet spark_catalog.default.inventory Output [3]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3] Batched: true Location: InMemoryFileIndex [] @@ -51,7 +51,7 @@ Join condition: None Output [2]: [inv_item_sk#1, inv_quantity_on_hand#2] Input [4]: [inv_item_sk#1, inv_quantity_on_hand#2, inv_date_sk#3, d_date_sk#5] -(unknown) Scan parquet spark_catalog.default.item +(7) Scan parquet spark_catalog.default.item Output [5]: [i_item_sk#6, i_brand#7, i_class#8, i_category#9, i_product_name#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -79,7 +79,7 @@ Join condition: None Output [5]: [inv_quantity_on_hand#2, i_brand#7, i_class#8, i_category#9, i_product_name#10] Input [7]: [inv_item_sk#1, inv_quantity_on_hand#2, i_item_sk#6, i_brand#7, i_class#8, i_category#9, i_product_name#10] -(unknown) Scan parquet spark_catalog.default.warehouse +(13) Scan parquet spark_catalog.default.warehouse Output: [] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -136,7 +136,7 @@ BroadcastExchange (27) +- CometScan parquet spark_catalog.default.date_dim (23) -(unknown) Scan parquet spark_catalog.default.date_dim +(23) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#5, d_month_seq#22] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt index e0a290cea..3270618dc 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q22a/explain.txt @@ -46,7 +46,7 @@ TakeOrderedAndProject (45) +- ReusedExchange (39) -(unknown) Scan parquet spark_catalog.default.inventory +(1) Scan parquet spark_catalog.default.inventory Output [4]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4] Batched: true Location: InMemoryFileIndex [] @@ -74,7 +74,7 @@ Join condition: None Output [3]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3] Input [5]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, inv_date_sk#4, d_date_sk#6] -(unknown) Scan parquet spark_catalog.default.item +(7) Scan parquet spark_catalog.default.item Output [5]: [i_item_sk#7, i_brand#8, i_class#9, i_category#10, i_product_name#11] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -102,7 +102,7 @@ Join condition: None Output [6]: [inv_warehouse_sk#2, inv_quantity_on_hand#3, i_brand#8, 
i_class#9, i_category#10, i_product_name#11] Input [8]: [inv_item_sk#1, inv_warehouse_sk#2, inv_quantity_on_hand#3, i_item_sk#7, i_brand#8, i_class#9, i_category#10, i_product_name#11] -(unknown) Scan parquet spark_catalog.default.warehouse +(13) Scan parquet spark_catalog.default.warehouse Output [1]: [w_warehouse_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/warehouse] @@ -290,7 +290,7 @@ BroadcastExchange (50) +- CometScan parquet spark_catalog.default.date_dim (46) -(unknown) Scan parquet spark_catalog.default.date_dim +(46) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#6, d_month_seq#59] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q24/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q24/explain.txt index bc8b8657e..8fd74c799 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q24/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q24/explain.txt @@ -49,7 +49,7 @@ +- CometScan parquet spark_catalog.default.customer_address (34) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, ss_sold_date_sk#6] Batched: true Location [not included in comparison]/{warehouse_dir}/store_sales] @@ -75,7 +75,7 @@ Arguments: hashpartitioning(ss_ticket_number#4, ss_item_sk#1, 5), ENSURE_REQUIRE Input [5]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5] Arguments: [ss_ticket_number#4 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST], false, 0 -(unknown) Scan parquet spark_catalog.default.store_returns +(7) Scan parquet spark_catalog.default.store_returns Output [3]: [sr_item_sk#7, sr_ticket_number#8, sr_returned_date_sk#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store_returns] @@ -111,7 +111,7 @@ Join condition: None Output [4]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5] Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_net_paid#5, sr_item_sk#7, sr_ticket_number#8] -(unknown) Scan parquet spark_catalog.default.store +(15) Scan parquet spark_catalog.default.store Output [5]: [s_store_sk#10, s_store_name#11, s_market_id#12, s_state#13, s_zip#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -143,7 +143,7 @@ Join condition: None Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] -(unknown) Scan parquet spark_catalog.default.item +(22) Scan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -171,7 +171,7 @@ Join condition: None Output [10]: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Input [12]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] -(unknown) Scan parquet spark_catalog.default.customer +(28) Scan parquet 
spark_catalog.default.customer Output [5]: [c_customer_sk#21, c_current_addr_sk#22, c_first_name#23, c_last_name#24, c_birth_country#25] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -199,7 +199,7 @@ Join condition: None Output [13]: [ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20, c_current_addr_sk#22, c_first_name#23, c_last_name#24, c_birth_country#25] Input [15]: [ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20, c_customer_sk#21, c_current_addr_sk#22, c_first_name#23, c_last_name#24, c_birth_country#25] -(unknown) Scan parquet spark_catalog.default.customer_address +(34) Scan parquet spark_catalog.default.customer_address Output [4]: [ca_address_sk#26, ca_state#27, ca_zip#28, ca_country#29] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -344,7 +344,7 @@ Join condition: None Output [6]: [ss_item_sk#1, ss_customer_sk#2, ss_net_paid#5, s_store_name#11, s_state#13, s_zip#14] Input [8]: [ss_item_sk#1, ss_customer_sk#2, ss_store_sk#3, ss_net_paid#5, s_store_sk#10, s_store_name#11, s_state#13, s_zip#14] -(unknown) Scan parquet spark_catalog.default.item +(58) Scan parquet spark_catalog.default.item Output [6]: [i_item_sk#15, i_current_price#16, i_size#17, i_color#18, i_units#19, i_manager_id#20] Batched: true Location [not included in comparison]/{warehouse_dir}/item] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q27a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q27a/explain.txt index 54aadf2cf..03399fd5a 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q27a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q27a/explain.txt @@ -74,7 +74,7 @@ TakeOrderedAndProject (73) +- CometScan parquet spark_catalog.default.item (63) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -89,7 +89,7 @@ Condition : ((isnotnull(ss_cdemo_sk#2) AND isnotnull(ss_store_sk#3)) AND isnotnu (3) ColumnarToRow [codegen id : 5] Input [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(4) Scan parquet spark_catalog.default.customer_demographics Output [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -134,7 +134,7 @@ Join condition: None Output [6]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7] Input [8]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8, d_date_sk#14] -(unknown) Scan parquet spark_catalog.default.store +(14) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#15, s_state#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -162,7 +162,7 @@ Join condition: None Output [6]: [ss_item_sk#1, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, 
s_state#16] Input [8]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, s_store_sk#15, s_state#16] -(unknown) Scan parquet spark_catalog.default.item +(20) Scan parquet spark_catalog.default.item Output [2]: [i_item_sk#17, i_item_id#18] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -208,7 +208,7 @@ Functions [4]: [avg(agg1#19), avg(UnscaledValue(agg2#20)), avg(UnscaledValue(agg Aggregate Attributes [4]: [avg(agg1#19)#39, avg(UnscaledValue(agg2#20))#40, avg(UnscaledValue(agg3#21))#41, avg(UnscaledValue(agg4#22))#42] Results [7]: [i_item_id#18, s_state#16, 0 AS g_state#43, avg(agg1#19)#39 AS agg1#44, cast((avg(UnscaledValue(agg2#20))#40 / 100.0) as decimal(11,6)) AS agg2#45, cast((avg(UnscaledValue(agg3#21))#41 / 100.0) as decimal(11,6)) AS agg3#46, cast((avg(UnscaledValue(agg4#22))#42 / 100.0) as decimal(11,6)) AS agg4#47] -(unknown) Scan parquet spark_catalog.default.store_sales +(29) Scan parquet spark_catalog.default.store_sales Output [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -249,7 +249,7 @@ Join condition: None Output [6]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7] Input [8]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8, d_date_sk#14] -(unknown) Scan parquet spark_catalog.default.store +(38) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#15, s_state#16] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -312,7 +312,7 @@ Functions [4]: [avg(agg1#19), avg(UnscaledValue(agg2#20)), avg(UnscaledValue(agg Aggregate Attributes [4]: [avg(agg1#19)#65, avg(UnscaledValue(agg2#20))#66, avg(UnscaledValue(agg3#21))#67, avg(UnscaledValue(agg4#22))#68] Results [7]: [i_item_id#18, null AS s_state#69, 1 AS g_state#70, avg(agg1#19)#65 AS agg1#71, cast((avg(UnscaledValue(agg2#20))#66 / 100.0) as decimal(11,6)) AS agg2#72, cast((avg(UnscaledValue(agg3#21))#67 / 100.0) as decimal(11,6)) AS agg3#73, cast((avg(UnscaledValue(agg4#22))#68 / 100.0) as decimal(11,6)) AS agg4#74] -(unknown) Scan parquet spark_catalog.default.store_sales +(51) Scan parquet spark_catalog.default.store_sales Output [8]: [ss_item_sk#1, ss_cdemo_sk#2, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, ss_sold_date_sk#8] Batched: true Location: InMemoryFileIndex [] @@ -366,7 +366,7 @@ Join condition: None Output [5]: [ss_item_sk#1, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7] Input [7]: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sales_price#6, ss_coupon_amt#7, s_store_sk#15] -(unknown) Scan parquet spark_catalog.default.item +(63) Scan parquet spark_catalog.default.item Output [1]: [i_item_sk#17] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -428,7 +428,7 @@ BroadcastExchange (78) +- CometScan parquet spark_catalog.default.date_dim (74) -(unknown) Scan parquet spark_catalog.default.date_dim +(74) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#14, d_year#103] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q34/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q34/explain.txt index 
a86edcfa3..387fa47f9 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q34/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q34/explain.txt @@ -33,7 +33,7 @@ +- CometScan parquet spark_catalog.default.customer (25) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -61,7 +61,7 @@ Join condition: None Output [4]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4] Input [6]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, ss_sold_date_sk#5, d_date_sk#7] -(unknown) Scan parquet spark_catalog.default.store +(7) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#8, s_county#9] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -93,7 +93,7 @@ Join condition: None Output [3]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_ticket_number#4] Input [5]: [ss_customer_sk#1, ss_hdemo_sk#2, ss_store_sk#3, ss_ticket_number#4, s_store_sk#8] -(unknown) Scan parquet spark_catalog.default.household_demographics +(14) Scan parquet spark_catalog.default.household_demographics Output [4]: [hd_demo_sk#10, hd_buy_potential#11, hd_dep_count#12, hd_vehicle_count#13] Batched: true Location [not included in comparison]/{warehouse_dir}/household_demographics] @@ -147,7 +147,7 @@ Results [3]: [ss_ticket_number#4, ss_customer_sk#1, count(1)#16 AS cnt#17] Input [3]: [ss_ticket_number#4, ss_customer_sk#1, cnt#17] Condition : ((cnt#17 >= 15) AND (cnt#17 <= 20)) -(unknown) Scan parquet spark_catalog.default.customer +(25) Scan parquet spark_catalog.default.customer Output [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -193,7 +193,7 @@ BroadcastExchange (37) +- CometScan parquet spark_catalog.default.date_dim (33) -(unknown) Scan parquet spark_catalog.default.date_dim +(33) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#23, d_dom#24] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt index e723b6c0e..ce8753277 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35/explain.txt @@ -43,7 +43,7 @@ TakeOrderedAndProject (42) +- CometScan parquet spark_catalog.default.customer_demographics (33) -(unknown) Scan parquet spark_catalog.default.customer +(1) Scan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -57,7 +57,7 @@ Condition : (isnotnull(c_current_addr_sk#5) AND isnotnull(c_current_cdemo_sk#4)) (3) ColumnarToRow [codegen id : 9] Input [3]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -90,7 +90,7 @@ Right keys [1]: [ss_customer_sk#6] Join type: LeftSemi 
Join condition: None -(unknown) Scan parquet spark_catalog.default.web_sales +(11) Scan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#10, ws_sold_date_sk#11] Batched: true Location: InMemoryFileIndex [] @@ -123,7 +123,7 @@ Right keys [1]: [ws_bill_customer_sk#10] Join type: ExistenceJoin(exists#2) Join condition: None -(unknown) Scan parquet spark_catalog.default.catalog_sales +(18) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#14, cs_sold_date_sk#15] Batched: true Location: InMemoryFileIndex [] @@ -164,7 +164,7 @@ Condition : (exists#2 OR exists#1) Output [2]: [c_current_cdemo_sk#4, c_current_addr_sk#5] Input [5]: [c_customer_sk#3, c_current_cdemo_sk#4, c_current_addr_sk#5, exists#2, exists#1] -(unknown) Scan parquet spark_catalog.default.customer_address +(27) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#18, ca_state#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -192,7 +192,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#4, ca_state#19] Input [4]: [c_current_cdemo_sk#4, c_current_addr_sk#5, ca_address_sk#18, ca_state#19] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(33) Scan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_dep_count#23, cd_dep_employed_count#24, cd_dep_college_count#25] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -252,7 +252,7 @@ BroadcastExchange (47) +- CometScan parquet spark_catalog.default.date_dim (43) -(unknown) Scan parquet spark_catalog.default.date_dim +(43) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#74, d_qoy#75] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt index 29e62c0b7..648b19933 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q35a/explain.txt @@ -41,7 +41,7 @@ TakeOrderedAndProject (40) +- CometScan parquet spark_catalog.default.customer_demographics (31) -(unknown) Scan parquet spark_catalog.default.customer +(1) Scan parquet spark_catalog.default.customer Output [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] Batched: true Location [not included in comparison]/{warehouse_dir}/customer] @@ -55,7 +55,7 @@ Condition : (isnotnull(c_current_addr_sk#3) AND isnotnull(c_current_cdemo_sk#2)) (3) ColumnarToRow [codegen id : 9] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [2]: [ss_customer_sk#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -88,7 +88,7 @@ Right keys [1]: [ss_customer_sk#4] Join type: LeftSemi Join condition: None -(unknown) Scan parquet spark_catalog.default.web_sales +(11) Scan parquet spark_catalog.default.web_sales Output [2]: [ws_bill_customer_sk#8, ws_sold_date_sk#9] Batched: true Location: InMemoryFileIndex [] @@ -111,7 +111,7 @@ Join condition: None Output [1]: [ws_bill_customer_sk#8 AS customsk#12] Input [3]: [ws_bill_customer_sk#8, ws_sold_date_sk#9, d_date_sk#11] -(unknown) Scan parquet 
spark_catalog.default.catalog_sales +(16) Scan parquet spark_catalog.default.catalog_sales Output [2]: [cs_ship_customer_sk#13, cs_sold_date_sk#14] Batched: true Location: InMemoryFileIndex [] @@ -150,7 +150,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#2, c_current_addr_sk#3] Input [3]: [c_customer_sk#1, c_current_cdemo_sk#2, c_current_addr_sk#3] -(unknown) Scan parquet spark_catalog.default.customer_address +(25) Scan parquet spark_catalog.default.customer_address Output [2]: [ca_address_sk#18, ca_state#19] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_address] @@ -178,7 +178,7 @@ Join condition: None Output [2]: [c_current_cdemo_sk#2, ca_state#19] Input [4]: [c_current_cdemo_sk#2, c_current_addr_sk#3, ca_address_sk#18, ca_state#19] -(unknown) Scan parquet spark_catalog.default.customer_demographics +(31) Scan parquet spark_catalog.default.customer_demographics Output [6]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_dep_count#23, cd_dep_employed_count#24, cd_dep_college_count#25] Batched: true Location [not included in comparison]/{warehouse_dir}/customer_demographics] @@ -238,7 +238,7 @@ BroadcastExchange (45) +- CometScan parquet spark_catalog.default.date_dim (41) -(unknown) Scan parquet spark_catalog.default.date_dim +(41) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#7, d_year#74, d_qoy#75] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt index 2519f23a2..42a213b7c 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q36a/explain.txt @@ -42,7 +42,7 @@ TakeOrderedAndProject (41) +- ReusedExchange (28) -(unknown) Scan parquet spark_catalog.default.store_sales +(1) Scan parquet spark_catalog.default.store_sales Output [5]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5] Batched: true Location: InMemoryFileIndex [] @@ -70,7 +70,7 @@ Join condition: None Output [4]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4] Input [6]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, ss_sold_date_sk#5, d_date_sk#7] -(unknown) Scan parquet spark_catalog.default.item +(7) Scan parquet spark_catalog.default.item Output [3]: [i_item_sk#8, i_class#9, i_category#10] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -98,7 +98,7 @@ Join condition: None Output [5]: [ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, i_class#9, i_category#10] Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, i_item_sk#8, i_class#9, i_category#10] -(unknown) Scan parquet spark_catalog.default.store +(13) Scan parquet spark_catalog.default.store Output [2]: [s_store_sk#11, s_state#12] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -254,7 +254,7 @@ BroadcastExchange (46) +- CometScan parquet spark_catalog.default.date_dim (42) -(unknown) Scan parquet spark_catalog.default.date_dim +(42) Scan parquet spark_catalog.default.date_dim Output [2]: [d_date_sk#7, d_year#64] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q47/explain.txt 
b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q47/explain.txt index 2be3c9c66..727f50abf 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q47/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q47/explain.txt @@ -46,7 +46,7 @@ TakeOrderedAndProject (45) +- ReusedExchange (38) -(unknown) Scan parquet spark_catalog.default.item +(1) Scan parquet spark_catalog.default.item Output [3]: [i_item_sk#1, i_brand#2, i_category#3] Batched: true Location [not included in comparison]/{warehouse_dir}/item] @@ -60,7 +60,7 @@ Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull( (3) ColumnarToRow [codegen id : 4] Input [3]: [i_item_sk#1, i_brand#2, i_category#3] -(unknown) Scan parquet spark_catalog.default.store_sales +(4) Scan parquet spark_catalog.default.store_sales Output [4]: [ss_item_sk#4, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7] Batched: true Location: InMemoryFileIndex [] @@ -102,7 +102,7 @@ Join condition: None Output [6]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, d_year#10, d_moy#11] Input [8]: [i_brand#2, i_category#3, ss_store_sk#5, ss_sales_price#6, ss_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11] -(unknown) Scan parquet spark_catalog.default.store +(13) Scan parquet spark_catalog.default.store Output [3]: [s_store_sk#12, s_store_name#13, s_company_name#14] Batched: true Location [not included in comparison]/{warehouse_dir}/store] @@ -258,7 +258,7 @@ BroadcastExchange (49) +- CometScan parquet spark_catalog.default.date_dim (46) -(unknown) Scan parquet spark_catalog.default.date_dim +(46) Scan parquet spark_catalog.default.date_dim Output [3]: [d_date_sk#9, d_year#10, d_moy#11] Batched: true Location [not included in comparison]/{warehouse_dir}/date_dim] diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt index f8419179d..6591c8b8a 100644 --- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt +++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q49/explain.txt @@ -75,7 +75,7 @@ TakeOrderedAndProject (74) +- ReusedExchange (57) -(unknown) Scan parquet spark_catalog.default.web_sales +(1) Scan parquet spark_catalog.default.web_sales Output [6]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_net_profit#5, ws_sold_date_sk#6] Batched: true Location: InMemoryFileIndex [] @@ -95,7 +95,7 @@ Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_so Input [5]: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] Arguments: [ws_item_sk#1, ws_order_number#2, ws_quantity#3, ws_net_paid#4, ws_sold_date_sk#6] -(unknown) Scan parquet spark_catalog.default.web_returns +(5) Scan parquet spark_catalog.default.web_returns Output [5]: [wr_item_sk#8, wr_order_number#9, wr_return_quantity#10, wr_return_amt#11, wr_returned_date_sk#12] Batched: true Location [not included in comparison]/{warehouse_dir}/web_returns] @@ -181,7 +181,7 @@ Condition : ((return_rank#33 <= 10) OR (currency_rank#34 <= 10)) Output [5]: [web AS channel#35, item#30, return_ratio#31, return_rank#33, currency_rank#34] Input [5]: [item#30, return_ratio#31, currency_ratio#32, return_rank#33, currency_rank#34] -(unknown) Scan parquet spark_catalog.default.catalog_sales +(24) Scan parquet spark_catalog.default.catalog_sales Output [6]: [cs_item_sk#36, 
cs_order_number#37, cs_quantity#38, cs_net_paid#39, cs_net_profit#40, cs_sold_date_sk#41]
 Batched: true
 Location: InMemoryFileIndex []
@@ -201,7 +201,7 @@ Arguments: [cs_item_sk#36, cs_order_number#37, cs_quantity#38, cs_net_paid#39, c
 Input [5]: [cs_item_sk#36, cs_order_number#37, cs_quantity#38, cs_net_paid#39, cs_sold_date_sk#41]
 Arguments: [cs_item_sk#36, cs_order_number#37, cs_quantity#38, cs_net_paid#39, cs_sold_date_sk#41]

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(28) Scan parquet spark_catalog.default.catalog_returns
 Output [5]: [cr_item_sk#43, cr_order_number#44, cr_return_quantity#45, cr_return_amount#46, cr_returned_date_sk#47]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -287,7 +287,7 @@ Condition : ((return_rank#68 <= 10) OR (currency_rank#69 <= 10))
 Output [5]: [catalog AS channel#70, item#65, return_ratio#66, return_rank#68, currency_rank#69]
 Input [5]: [item#65, return_ratio#66, currency_ratio#67, return_rank#68, currency_rank#69]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(47) Scan parquet spark_catalog.default.store_sales
 Output [6]: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74, ss_net_profit#75, ss_sold_date_sk#76]
 Batched: true
 Location: InMemoryFileIndex []
@@ -307,7 +307,7 @@ Arguments: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74,
 Input [5]: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74, ss_sold_date_sk#76]
 Arguments: [ss_item_sk#71, ss_ticket_number#72, ss_quantity#73, ss_net_paid#74, ss_sold_date_sk#76]

-(unknown) Scan parquet spark_catalog.default.store_returns
+(51) Scan parquet spark_catalog.default.store_returns
 Output [5]: [sr_item_sk#78, sr_ticket_number#79, sr_return_quantity#80, sr_return_amt#81, sr_returned_date_sk#82]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store_returns]
@@ -427,7 +427,7 @@ BroadcastExchange (79)
 +- CometScan parquet spark_catalog.default.date_dim (75)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(75) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#13, d_year#106, d_moy#107]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt
index 0dedd0472..ee61d214a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q51a/explain.txt
@@ -68,7 +68,7 @@ TakeOrderedAndProject (67)
 +- ReusedExchange (57)


-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
 Output [3]: [ws_item_sk#1, ws_sales_price#2, ws_sold_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -185,7 +185,7 @@ Arguments: hashpartitioning(item_sk#10, d_date#6, 5), ENSURE_REQUIREMENTS, [plan
 Input [3]: [item_sk#10, d_date#6, cume_sales#23]
 Arguments: [item_sk#10 ASC NULLS FIRST, d_date#6 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.store_sales
+(26) Scan parquet spark_catalog.default.store_sales
 Output [3]: [ss_item_sk#24, ss_sales_price#25, ss_sold_date_sk#26]
 Batched: true
 Location: InMemoryFileIndex []
@@ -389,7 +389,7 @@ BroadcastExchange (72)
 +- CometScan parquet spark_catalog.default.date_dim (68)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(68) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#5, d_date#6, d_month_seq#64]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt
index dff12158b..dbcade8ca 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q57/explain.txt
@@ -46,7 +46,7 @@ TakeOrderedAndProject (45)
 +- ReusedExchange (38)


-(unknown) Scan parquet spark_catalog.default.item
+(1) Scan parquet spark_catalog.default.item
 Output [3]: [i_item_sk#1, i_brand#2, i_category#3]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -60,7 +60,7 @@ Condition : ((isnotnull(i_item_sk#1) AND isnotnull(i_category#3)) AND isnotnull(
 (3) ColumnarToRow [codegen id : 4]
 Input [3]: [i_item_sk#1, i_brand#2, i_category#3]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(4) Scan parquet spark_catalog.default.catalog_sales
 Output [4]: [cs_call_center_sk#4, cs_item_sk#5, cs_sales_price#6, cs_sold_date_sk#7]
 Batched: true
 Location: InMemoryFileIndex []
@@ -102,7 +102,7 @@ Join condition: None
 Output [6]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, d_year#10, d_moy#11]
 Input [8]: [i_brand#2, i_category#3, cs_call_center_sk#4, cs_sales_price#6, cs_sold_date_sk#7, d_date_sk#9, d_year#10, d_moy#11]

-(unknown) Scan parquet spark_catalog.default.call_center
+(13) Scan parquet spark_catalog.default.call_center
 Output [2]: [cc_call_center_sk#12, cc_name#13]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/call_center]
@@ -258,7 +258,7 @@ BroadcastExchange (49)
 +- CometScan parquet spark_catalog.default.date_dim (46)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(46) Scan parquet spark_catalog.default.date_dim
 Output [3]: [d_date_sk#9, d_year#10, d_moy#11]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt
index d0c446ece..2345d02e7 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q5a/explain.txt
@@ -138,7 +138,7 @@ TakeOrderedAndProject (137)
 +- ReusedExchange (120)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -154,7 +154,7 @@ Condition : isnotnull(ss_store_sk#1)
 Input [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4]
 Arguments: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11], [ss_store_sk#1 AS store_sk#6, ss_sold_date_sk#4 AS date_sk#7, ss_ext_sales_price#2 AS sales_price#8, ss_net_profit#3 AS profit#9, 0.00 AS return_amt#10, 0.00 AS net_loss#11]

-(unknown) Scan parquet spark_catalog.default.store_returns
+(4) Scan parquet spark_catalog.default.store_returns
 Output [4]: [sr_store_sk#12, sr_return_amt#13, sr_net_loss#14, sr_returned_date_sk#15]
 Batched: true
 Location: InMemoryFileIndex []
@@ -190,7 +190,7 @@ Join condition: None
 Output [5]: [store_sk#6, sales_price#8, profit#9, return_amt#10, net_loss#11]
 Input [7]: [store_sk#6, date_sk#7, sales_price#8, profit#9, return_amt#10, net_loss#11, d_date_sk#22]

-(unknown) Scan parquet spark_catalog.default.store
+(12) Scan parquet spark_catalog.default.store
 Output [2]: [s_store_sk#23, s_store_id#24]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store]
@@ -236,7 +236,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#8)), sum(UnscaledValue(return_amt#
 Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#8))#33, sum(UnscaledValue(return_amt#10))#34, sum(UnscaledValue(profit#9))#35, sum(UnscaledValue(net_loss#11))#36]
 Results [5]: [store channel AS channel#37, concat(store, s_store_id#24) AS id#38, MakeDecimal(sum(UnscaledValue(sales_price#8))#33,17,2) AS sales#39, MakeDecimal(sum(UnscaledValue(return_amt#10))#34,17,2) AS returns#40, (MakeDecimal(sum(UnscaledValue(profit#9))#35,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#11))#36,17,2)) AS profit#41]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(21) Scan parquet spark_catalog.default.catalog_sales
 Output [4]: [cs_catalog_page_sk#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45]
 Batched: true
 Location: InMemoryFileIndex []
@@ -252,7 +252,7 @@ Condition : isnotnull(cs_catalog_page_sk#42)
 Input [4]: [cs_catalog_page_sk#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45]
 Arguments: [page_sk#47, date_sk#48, sales_price#49, profit#50, return_amt#51, net_loss#52], [cs_catalog_page_sk#42 AS page_sk#47, cs_sold_date_sk#45 AS date_sk#48, cs_ext_sales_price#43 AS sales_price#49, cs_net_profit#44 AS profit#50, 0.00 AS return_amt#51, 0.00 AS net_loss#52]

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(24) Scan parquet spark_catalog.default.catalog_returns
 Output [4]: [cr_catalog_page_sk#53, cr_return_amount#54, cr_net_loss#55, cr_returned_date_sk#56]
 Batched: true
 Location: InMemoryFileIndex []
@@ -288,7 +288,7 @@ Join condition: None
 Output [5]: [page_sk#47, sales_price#49, profit#50, return_amt#51, net_loss#52]
 Input [7]: [page_sk#47, date_sk#48, sales_price#49, profit#50, return_amt#51, net_loss#52, d_date_sk#63]

-(unknown) Scan parquet spark_catalog.default.catalog_page
+(32) Scan parquet spark_catalog.default.catalog_page
 Output [2]: [cp_catalog_page_sk#64, cp_catalog_page_id#65]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_page]
@@ -334,7 +334,7 @@ Functions [4]: [sum(UnscaledValue(sales_price#49)), sum(UnscaledValue(return_amt
 Aggregate Attributes [4]: [sum(UnscaledValue(sales_price#49))#74, sum(UnscaledValue(return_amt#51))#75, sum(UnscaledValue(profit#50))#76, sum(UnscaledValue(net_loss#52))#77]
 Results [5]: [catalog channel AS channel#78, concat(catalog_page, cp_catalog_page_id#65) AS id#79, MakeDecimal(sum(UnscaledValue(sales_price#49))#74,17,2) AS sales#80, MakeDecimal(sum(UnscaledValue(return_amt#51))#75,17,2) AS returns#81, (MakeDecimal(sum(UnscaledValue(profit#50))#76,17,2) - MakeDecimal(sum(UnscaledValue(net_loss#52))#77,17,2)) AS profit#82]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(41) Scan parquet spark_catalog.default.web_sales
 Output [4]: [ws_web_site_sk#83, ws_ext_sales_price#84, ws_net_profit#85, ws_sold_date_sk#86]
 Batched: true
 Location: InMemoryFileIndex []
@@ -350,7 +350,7 @@ Condition : isnotnull(ws_web_site_sk#83)
 Input [4]: [ws_web_site_sk#83, ws_ext_sales_price#84, ws_net_profit#85, ws_sold_date_sk#86]
 Arguments: [wsr_web_site_sk#88, date_sk#89, sales_price#90, profit#91, return_amt#92, net_loss#93], [ws_web_site_sk#83 AS wsr_web_site_sk#88, ws_sold_date_sk#86 AS date_sk#89, ws_ext_sales_price#84 AS sales_price#90, ws_net_profit#85 AS profit#91, 0.00 AS return_amt#92, 0.00 AS net_loss#93]

-(unknown) Scan parquet spark_catalog.default.web_returns
+(44) Scan parquet spark_catalog.default.web_returns
 Output [5]: [wr_item_sk#94, wr_order_number#95, wr_return_amt#96, wr_net_loss#97, wr_returned_date_sk#98]
 Batched: true
 Location: InMemoryFileIndex []
@@ -361,7 +361,7 @@ ReadSchema: struct
date_add(d_date#23, 5))
 Output [6]: [cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24]
 Input [10]: [cs_ship_date_sk#1, cs_item_sk#4, cs_promo_sk#5, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_date#23, d_week_seq#24, d_date_sk#27, d_date#28]

-(unknown) Scan parquet spark_catalog.default.promotion
+(51) Scan parquet spark_catalog.default.promotion
 Output [1]: [p_promo_sk#29]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/promotion]
@@ -340,7 +340,7 @@ Arguments: hashpartitioning(cs_item_sk#4, cs_order_number#6, 5), ENSURE_REQUIREM
 Input [5]: [cs_item_sk#4, cs_order_number#6, w_warehouse_name#15, i_item_desc#17, d_week_seq#24]
 Arguments: [cs_item_sk#4 ASC NULLS FIRST, cs_order_number#6 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(59) Scan parquet spark_catalog.default.catalog_returns
 Output [3]: [cr_item_sk#30, cr_order_number#31, cr_returned_date_sk#32]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -408,7 +408,7 @@ BroadcastExchange (75)
 +- CometScan parquet spark_catalog.default.date_dim (71)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(71) Scan parquet spark_catalog.default.date_dim
 Output [4]: [d_date_sk#22, d_date#23, d_week_seq#24, d_year#39]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt
index 776fad007..ad8c33ca1 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q74/explain.txt
@@ -72,7 +72,7 @@ TakeOrderedAndProject (71)
 +- ReusedExchange (62)


-(unknown) Scan parquet spark_catalog.default.customer
+(1) Scan parquet spark_catalog.default.customer
 Output [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -86,7 +86,7 @@ Condition : (isnotnull(c_customer_sk#1) AND isnotnull(c_customer_id#2))
 (3) ColumnarToRow [codegen id : 3]
 Input [4]: [c_customer_sk#1, c_customer_id#2, c_first_name#3, c_last_name#4]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(4) Scan parquet spark_catalog.default.store_sales
 Output [3]: [ss_customer_sk#5, ss_net_paid#6, ss_sold_date_sk#7]
 Batched: true
 Location: InMemoryFileIndex []
@@ -150,7 +150,7 @@ Results [2]: [c_customer_id#2 AS customer_id#14, MakeDecimal(sum(UnscaledValue(s
 Input [2]: [customer_id#14, year_total#15]
 Condition : (isnotnull(year_total#15) AND (year_total#15 > 0.00))

-(unknown) Scan parquet spark_catalog.default.customer
+(17) Scan parquet spark_catalog.default.customer
 Output [4]: [c_customer_sk#16, c_customer_id#17, c_first_name#18, c_last_name#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -164,7 +164,7 @@ Condition : (isnotnull(c_customer_sk#16) AND isnotnull(c_customer_id#17))
 (19) ColumnarToRow [codegen id : 6]
 Input [4]: [c_customer_sk#16, c_customer_id#17, c_first_name#18, c_last_name#19]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(20) Scan parquet spark_catalog.default.store_sales
 Output [3]: [ss_customer_sk#20, ss_net_paid#21, ss_sold_date_sk#22]
 Batched: true
 Location: InMemoryFileIndex []
@@ -234,7 +234,7 @@ Right keys [1]: [customer_id#28]
 Join type: Inner
 Join condition: None

-(unknown) Scan parquet spark_catalog.default.customer
+(34) Scan parquet spark_catalog.default.customer
 Output [4]: [c_customer_sk#32, c_customer_id#33, c_first_name#34, c_last_name#35]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -248,7 +248,7 @@ Condition : (isnotnull(c_customer_sk#32) AND isnotnull(c_customer_id#33))
 (36) ColumnarToRow [codegen id : 10]
 Input [4]: [c_customer_sk#32, c_customer_id#33, c_first_name#34, c_last_name#35]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(37) Scan parquet spark_catalog.default.web_sales
 Output [3]: [ws_bill_customer_sk#36, ws_net_paid#37, ws_sold_date_sk#38]
 Batched: true
 Location: InMemoryFileIndex []
@@ -326,7 +326,7 @@ Join condition: None
 Output [7]: [customer_id#14, year_total#15, customer_id#28, customer_first_name#29, customer_last_name#30, year_total#31, year_total#46]
 Input [8]: [customer_id#14, year_total#15, customer_id#28, customer_first_name#29, customer_last_name#30, year_total#31, customer_id#45, year_total#46]

-(unknown) Scan parquet spark_catalog.default.customer
+(53) Scan parquet spark_catalog.default.customer
 Output [4]: [c_customer_sk#47, c_customer_id#48, c_first_name#49, c_last_name#50]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/customer]
@@ -340,7 +340,7 @@ Condition : (isnotnull(c_customer_sk#47) AND isnotnull(c_customer_id#48))
 (55) ColumnarToRow [codegen id : 14]
 Input [4]: [c_customer_sk#47, c_customer_id#48, c_first_name#49, c_last_name#50]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(56) Scan parquet spark_catalog.default.web_sales
 Output [3]: [ws_bill_customer_sk#51, ws_net_paid#52, ws_sold_date_sk#53]
 Batched: true
 Location: InMemoryFileIndex []
@@ -427,7 +427,7 @@ BroadcastExchange (75)
 +- CometScan parquet spark_catalog.default.date_dim (72)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(72) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#9, d_year#10]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -452,7 +452,7 @@ BroadcastExchange (79)
 +- CometScan parquet spark_catalog.default.date_dim (76)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(76) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#24, d_year#25]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q75/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q75/explain.txt
index 1c5416407..14a9459aa 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q75/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q75/explain.txt
@@ -130,7 +130,7 @@ TakeOrderedAndProject (129)
 +- ReusedExchange (113)


-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(1) Scan parquet spark_catalog.default.catalog_sales
 Output [5]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, cs_ext_sales_price#4, cs_sold_date_sk#5]
 Batched: true
 Location: InMemoryFileIndex []
@@ -145,7 +145,7 @@ Condition : isnotnull(cs_item_sk#1)
 (3) ColumnarToRow [codegen id : 3]
 Input [5]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, cs_ext_sales_price#4, cs_sold_date_sk#5]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
 Output [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -198,7 +198,7 @@ Arguments: hashpartitioning(cs_order_number#2, cs_item_sk#1, 5), ENSURE_REQUIREM
 Input [9]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, cs_ext_sales_price#4, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, d_year#14]
 Arguments: [cs_order_number#2 ASC NULLS FIRST, cs_item_sk#1 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(16) Scan parquet spark_catalog.default.catalog_returns
 Output [5]: [cr_item_sk#15, cr_order_number#16, cr_return_quantity#17, cr_return_amount#18, cr_returned_date_sk#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -234,7 +234,7 @@ Join condition: None
 Output [7]: [d_year#14, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, (cs_quantity#3 - coalesce(cr_return_quantity#17, 0)) AS sales_cnt#20, (cs_ext_sales_price#4 - coalesce(cr_return_amount#18, 0.00)) AS sales_amt#21]
 Input [13]: [cs_item_sk#1, cs_order_number#2, cs_quantity#3, cs_ext_sales_price#4, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, d_year#14, cr_item_sk#15, cr_order_number#16, cr_return_quantity#17, cr_return_amount#18]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(24) Scan parquet spark_catalog.default.store_sales
 Output [5]: [ss_item_sk#22, ss_ticket_number#23, ss_quantity#24, ss_ext_sales_price#25, ss_sold_date_sk#26]
 Batched: true
 Location: InMemoryFileIndex []
@@ -283,7 +283,7 @@ Arguments: hashpartitioning(ss_ticket_number#23, ss_item_sk#22, 5), ENSURE_REQUI
 Input [9]: [ss_item_sk#22, ss_ticket_number#23, ss_quantity#24, ss_ext_sales_price#25, i_brand_id#29, i_class_id#30, i_category_id#31, i_manufact_id#32, d_year#34]
 Arguments: [ss_ticket_number#23 ASC NULLS FIRST, ss_item_sk#22 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.store_returns
+(35) Scan parquet spark_catalog.default.store_returns
 Output [5]: [sr_item_sk#35, sr_ticket_number#36, sr_return_quantity#37, sr_return_amt#38, sr_returned_date_sk#39]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store_returns]
@@ -319,7 +319,7 @@ Join condition: None
 Output [7]: [d_year#34, i_brand_id#29, i_class_id#30, i_category_id#31, i_manufact_id#32, (ss_quantity#24 - coalesce(sr_return_quantity#37, 0)) AS sales_cnt#40, (ss_ext_sales_price#25 - coalesce(sr_return_amt#38, 0.00)) AS sales_amt#41]
 Input [13]: [ss_item_sk#22, ss_ticket_number#23, ss_quantity#24, ss_ext_sales_price#25, i_brand_id#29, i_class_id#30, i_category_id#31, i_manufact_id#32, d_year#34, sr_item_sk#35, sr_ticket_number#36, sr_return_quantity#37, sr_return_amt#38]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(43) Scan parquet spark_catalog.default.web_sales
 Output [5]: [ws_item_sk#42, ws_order_number#43, ws_quantity#44, ws_ext_sales_price#45, ws_sold_date_sk#46]
 Batched: true
 Location: InMemoryFileIndex []
@@ -368,7 +368,7 @@ Arguments: hashpartitioning(ws_order_number#43, ws_item_sk#42, 5), ENSURE_REQUIR
 Input [9]: [ws_item_sk#42, ws_order_number#43, ws_quantity#44, ws_ext_sales_price#45, i_brand_id#49, i_class_id#50, i_category_id#51, i_manufact_id#52, d_year#54]
 Arguments: [ws_order_number#43 ASC NULLS FIRST, ws_item_sk#42 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.web_returns
+(54) Scan parquet spark_catalog.default.web_returns
 Output [5]: [wr_item_sk#55, wr_order_number#56, wr_return_quantity#57, wr_return_amt#58, wr_returned_date_sk#59]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/web_returns]
@@ -454,7 +454,7 @@ Arguments: hashpartitioning(i_brand_id#8, i_class_id#9, i_category_id#10, i_manu
 Input [7]: [d_year#14, i_brand_id#8, i_class_id#9, i_category_id#10, i_manufact_id#12, sales_cnt#68, sales_amt#69]
 Arguments: [i_brand_id#8 ASC NULLS FIRST, i_class_id#9 ASC NULLS FIRST, i_category_id#10 ASC NULLS FIRST, i_manufact_id#12 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(72) Scan parquet spark_catalog.default.catalog_sales
 Output [5]: [cs_item_sk#70, cs_order_number#71, cs_quantity#72, cs_ext_sales_price#73, cs_sold_date_sk#74]
 Batched: true
 Location: InMemoryFileIndex []
@@ -520,7 +520,7 @@ Join condition: None
 Output [7]: [d_year#82, i_brand_id#77, i_class_id#78, i_category_id#79, i_manufact_id#80, (cs_quantity#72 - coalesce(cr_return_quantity#85, 0)) AS sales_cnt#20, (cs_ext_sales_price#73 - coalesce(cr_return_amount#86, 0.00)) AS sales_amt#21]
 Input [13]: [cs_item_sk#70, cs_order_number#71, cs_quantity#72, cs_ext_sales_price#73, i_brand_id#77, i_class_id#78, i_category_id#79, i_manufact_id#80, d_year#82, cr_item_sk#83, cr_order_number#84, cr_return_quantity#85, cr_return_amount#86]

-(unknown) Scan parquet spark_catalog.default.store_sales
+(87) Scan parquet spark_catalog.default.store_sales
 Output [5]: [ss_item_sk#87, ss_ticket_number#88, ss_quantity#89, ss_ext_sales_price#90, ss_sold_date_sk#91]
 Batched: true
 Location: InMemoryFileIndex []
@@ -586,7 +586,7 @@ Join condition: None
 Output [7]: [d_year#99, i_brand_id#94, i_class_id#95, i_category_id#96, i_manufact_id#97, (ss_quantity#89 - coalesce(sr_return_quantity#102, 0)) AS sales_cnt#40, (ss_ext_sales_price#90 - coalesce(sr_return_amt#103, 0.00)) AS sales_amt#41]
 Input [13]: [ss_item_sk#87, ss_ticket_number#88, ss_quantity#89, ss_ext_sales_price#90, i_brand_id#94, i_class_id#95, i_category_id#96, i_manufact_id#97, d_year#99, sr_item_sk#100, sr_ticket_number#101, sr_return_quantity#102, sr_return_amt#103]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(102) Scan parquet spark_catalog.default.web_sales
 Output [5]: [ws_item_sk#104, ws_order_number#105, ws_quantity#106, ws_ext_sales_price#107, ws_sold_date_sk#108]
 Batched: true
 Location: InMemoryFileIndex []
@@ -725,7 +725,7 @@ BroadcastExchange (133)
 +- CometScan parquet spark_catalog.default.date_dim (130)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(130) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#13, d_year#14]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
@@ -754,7 +754,7 @@ BroadcastExchange (137)
 +- CometScan parquet spark_catalog.default.date_dim (134)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(134) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#81, d_year#82]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt
index 82c7d9b24..dd92e2a1c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q77a/explain.txt
@@ -99,7 +99,7 @@ TakeOrderedAndProject (98)
 +- ReusedExchange (89)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [4]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4]
 Batched: true
 Location: InMemoryFileIndex []
@@ -127,7 +127,7 @@ Join condition: None
 Output [3]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3]
 Input [5]: [ss_store_sk#1, ss_ext_sales_price#2, ss_net_profit#3, ss_sold_date_sk#4, d_date_sk#6]

-(unknown) Scan parquet spark_catalog.default.store
+(7) Scan parquet spark_catalog.default.store
 Output [1]: [s_store_sk#7]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store]
@@ -173,7 +173,7 @@ Functions [2]: [sum(UnscaledValue(ss_ext_sales_price#2)), sum(UnscaledValue(ss_n
 Aggregate Attributes [2]: [sum(UnscaledValue(ss_ext_sales_price#2))#12, sum(UnscaledValue(ss_net_profit#3))#13]
 Results [3]: [s_store_sk#7, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#2))#12,17,2) AS sales#14, MakeDecimal(sum(UnscaledValue(ss_net_profit#3))#13,17,2) AS profit#15]

-(unknown) Scan parquet spark_catalog.default.store_returns
+(16) Scan parquet spark_catalog.default.store_returns
 Output [4]: [sr_store_sk#16, sr_return_amt#17, sr_net_loss#18, sr_returned_date_sk#19]
 Batched: true
 Location: InMemoryFileIndex []
@@ -246,7 +246,7 @@ Join condition: None
 Output [5]: [store channel AS channel#31, s_store_sk#7 AS id#32, sales#14, coalesce(returns#29, 0.00) AS returns#33, (profit#15 - coalesce(profit_loss#30, 0.00)) AS profit#34]
 Input [6]: [s_store_sk#7, sales#14, profit#15, s_store_sk#22, returns#29, profit_loss#30]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(31) Scan parquet spark_catalog.default.catalog_sales
 Output [4]: [cs_call_center_sk#35, cs_ext_sales_price#36, cs_net_profit#37, cs_sold_date_sk#38]
 Batched: true
 Location: InMemoryFileIndex []
@@ -291,7 +291,7 @@ Results [3]: [cs_call_center_sk#35, MakeDecimal(sum(UnscaledValue(cs_ext_sales_p
 Input [3]: [cs_call_center_sk#35, sales#47, profit#48]
 Arguments: IdentityBroadcastMode, [plan_id=6]

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(40) Scan parquet spark_catalog.default.catalog_returns
 Output [3]: [cr_return_amount#49, cr_net_loss#50, cr_returned_date_sk#51]
 Batched: true
 Location: InMemoryFileIndex []
@@ -340,7 +340,7 @@ Join condition: None
 Output [5]: [catalog channel AS channel#62, cs_call_center_sk#35 AS id#63, sales#47, returns#60, (profit#48 - profit_loss#61) AS profit#64]
 Input [5]: [cs_call_center_sk#35, sales#47, profit#48, returns#60, profit_loss#61]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(50) Scan parquet spark_catalog.default.web_sales
 Output [4]: [ws_web_page_sk#65, ws_ext_sales_price#66, ws_net_profit#67, ws_sold_date_sk#68]
 Batched: true
 Location: InMemoryFileIndex []
@@ -368,7 +368,7 @@ Join condition: None
 Output [3]: [ws_web_page_sk#65, ws_ext_sales_price#66, ws_net_profit#67]
 Input [5]: [ws_web_page_sk#65, ws_ext_sales_price#66, ws_net_profit#67, ws_sold_date_sk#68, d_date_sk#70]

-(unknown) Scan parquet spark_catalog.default.web_page
+(56) Scan parquet spark_catalog.default.web_page
 Output [1]: [wp_web_page_sk#71]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/web_page]
@@ -414,7 +414,7 @@ Functions [2]: [sum(UnscaledValue(ws_ext_sales_price#66)), sum(UnscaledValue(ws_
 Aggregate Attributes [2]: [sum(UnscaledValue(ws_ext_sales_price#66))#76, sum(UnscaledValue(ws_net_profit#67))#77]
 Results [3]: [wp_web_page_sk#71, MakeDecimal(sum(UnscaledValue(ws_ext_sales_price#66))#76,17,2) AS sales#78, MakeDecimal(sum(UnscaledValue(ws_net_profit#67))#77,17,2) AS profit#79]

-(unknown) Scan parquet spark_catalog.default.web_returns
+(65) Scan parquet spark_catalog.default.web_returns
 Output [4]: [wr_web_page_sk#80, wr_return_amt#81, wr_net_loss#82, wr_returned_date_sk#83]
 Batched: true
 Location: InMemoryFileIndex []
@@ -597,7 +597,7 @@ BroadcastExchange (103)
 +- CometScan parquet spark_catalog.default.date_dim (99)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(99) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#6, d_date#159]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q78/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q78/explain.txt
index da7812b31..c7ee5b1c9 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q78/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q78/explain.txt
@@ -71,7 +71,7 @@ TakeOrderedAndProject (70)
 +- ReusedExchange (60)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [7]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_wholesale_cost#5, ss_sales_price#6, ss_sold_date_sk#7]
 Batched: true
 Location: InMemoryFileIndex []
@@ -94,7 +94,7 @@ Arguments: hashpartitioning(ss_ticket_number#3, ss_item_sk#1, 5), ENSURE_REQUIRE
 Input [7]: [ss_item_sk#1, ss_customer_sk#2, ss_ticket_number#3, ss_quantity#4, ss_wholesale_cost#5, ss_sales_price#6, ss_sold_date_sk#7]
 Arguments: [ss_ticket_number#3 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.store_returns
+(6) Scan parquet spark_catalog.default.store_returns
 Output [3]: [sr_item_sk#9, sr_ticket_number#10, sr_returned_date_sk#11]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store_returns]
@@ -169,7 +169,7 @@ Results [6]: [d_year#13 AS ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, sum(
 Input [6]: [ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, ss_qty#24, ss_wc#25, ss_sp#26]
 Arguments: [ss_sold_year#23 ASC NULLS FIRST, ss_item_sk#1 ASC NULLS FIRST, ss_customer_sk#2 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.web_sales
+(22) Scan parquet spark_catalog.default.web_sales
 Output [7]: [ws_item_sk#27, ws_bill_customer_sk#28, ws_order_number#29, ws_quantity#30, ws_wholesale_cost#31, ws_sales_price#32, ws_sold_date_sk#33]
 Batched: true
 Location: InMemoryFileIndex []
@@ -192,7 +192,7 @@ Arguments: hashpartitioning(ws_order_number#29, ws_item_sk#27, 5), ENSURE_REQUIR
 Input [7]: [ws_item_sk#27, ws_bill_customer_sk#28, ws_order_number#29, ws_quantity#30, ws_wholesale_cost#31, ws_sales_price#32, ws_sold_date_sk#33]
 Arguments: [ws_order_number#29 ASC NULLS FIRST, ws_item_sk#27 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.web_returns
+(27) Scan parquet spark_catalog.default.web_returns
 Output [3]: [wr_item_sk#35, wr_order_number#36, wr_returned_date_sk#37]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/web_returns]
@@ -281,7 +281,7 @@ Join condition: None
 Output [9]: [ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, ss_qty#24, ss_wc#25, ss_sp#26, ws_qty#51, ws_wc#52, ws_sp#53]
 Input [12]: [ss_sold_year#23, ss_item_sk#1, ss_customer_sk#2, ss_qty#24, ss_wc#25, ss_sp#26, ws_sold_year#49, ws_item_sk#27, ws_customer_sk#50, ws_qty#51, ws_wc#52, ws_sp#53]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(46) Scan parquet spark_catalog.default.catalog_sales
 Output [7]: [cs_bill_customer_sk#54, cs_item_sk#55, cs_order_number#56, cs_quantity#57, cs_wholesale_cost#58, cs_sales_price#59, cs_sold_date_sk#60]
 Batched: true
 Location: InMemoryFileIndex []
@@ -304,7 +304,7 @@ Arguments: hashpartitioning(cs_order_number#56, cs_item_sk#55, 5), ENSURE_REQUIR
 Input [7]: [cs_bill_customer_sk#54, cs_item_sk#55, cs_order_number#56, cs_quantity#57, cs_wholesale_cost#58, cs_sales_price#59, cs_sold_date_sk#60]
 Arguments: [cs_order_number#56 ASC NULLS FIRST, cs_item_sk#55 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(51) Scan parquet spark_catalog.default.catalog_returns
 Output [3]: [cr_item_sk#62, cr_order_number#63, cr_returned_date_sk#64]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -406,7 +406,7 @@ BroadcastExchange (74)
 +- CometScan parquet spark_catalog.default.date_dim (71)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(71) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#12, d_year#13]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt
index 0c9c97812..7cd1e2225 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q80a/explain.txt
@@ -121,7 +121,7 @@ TakeOrderedAndProject (120)
 +- ReusedExchange (111)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7]
 Batched: true
 Location: InMemoryFileIndex []
@@ -144,7 +144,7 @@ Arguments: hashpartitioning(ss_item_sk#1, ss_ticket_number#4, 5), ENSURE_REQUIRE
 Input [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ticket_number#4, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7]
 Arguments: [ss_item_sk#1 ASC NULLS FIRST, ss_ticket_number#4 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.store_returns
+(6) Scan parquet spark_catalog.default.store_returns
 Output [5]: [sr_item_sk#9, sr_ticket_number#10, sr_return_amt#11, sr_net_loss#12, sr_returned_date_sk#13]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store_returns]
@@ -193,7 +193,7 @@ Join condition: None
 Output [7]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12]
 Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, ss_sold_date_sk#7, sr_return_amt#11, sr_net_loss#12, d_date_sk#14]

-(unknown) Scan parquet spark_catalog.default.store
+(17) Scan parquet spark_catalog.default.store
 Output [2]: [s_store_sk#15, s_store_id#16]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/store]
@@ -221,7 +221,7 @@ Join condition: None
 Output [7]: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#16]
 Input [9]: [ss_item_sk#1, ss_store_sk#2, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_sk#15, s_store_id#16]

-(unknown) Scan parquet spark_catalog.default.item
+(23) Scan parquet spark_catalog.default.item
 Output [2]: [i_item_sk#17, i_current_price#18]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -253,7 +253,7 @@ Join condition: None
 Output [6]: [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#16]
 Input [8]: [ss_item_sk#1, ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#11, sr_net_loss#12, s_store_id#16, i_item_sk#17]

-(unknown) Scan parquet spark_catalog.default.promotion
+(30) Scan parquet spark_catalog.default.promotion
 Output [2]: [p_promo_sk#19, p_channel_tv#20]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/promotion]
@@ -303,7 +303,7 @@ Functions [3]: [sum(UnscaledValue(ss_ext_sales_price#5)), sum(coalesce(cast(sr_r
 Aggregate Attributes [3]: [sum(UnscaledValue(ss_ext_sales_price#5))#31, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#32, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#33]
 Results [5]: [store channel AS channel#34, concat(store, s_store_id#16) AS id#35, MakeDecimal(sum(UnscaledValue(ss_ext_sales_price#5))#31,17,2) AS sales#36, sum(coalesce(cast(sr_return_amt#11 as decimal(12,2)), 0.00))#32 AS returns#37, sum((ss_net_profit#6 - coalesce(cast(sr_net_loss#12 as decimal(12,2)), 0.00)))#33 AS profit#38]

-(unknown) Scan parquet spark_catalog.default.catalog_sales
+(40) Scan parquet spark_catalog.default.catalog_sales
 Output [7]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_order_number#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45]
 Batched: true
 Location: InMemoryFileIndex []
@@ -326,7 +326,7 @@ Arguments: hashpartitioning(cs_item_sk#40, cs_order_number#42, 5), ENSURE_REQUIR
 Input [7]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_order_number#42, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45]
 Arguments: [cs_item_sk#40 ASC NULLS FIRST, cs_order_number#42 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.catalog_returns
+(45) Scan parquet spark_catalog.default.catalog_returns
 Output [5]: [cr_item_sk#47, cr_order_number#48, cr_return_amount#49, cr_net_loss#50, cr_returned_date_sk#51]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_returns]
@@ -375,7 +375,7 @@ Join condition: None
 Output [7]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_ext_sales_price#43, cs_net_profit#44, cr_return_amount#49, cr_net_loss#50]
 Input [9]: [cs_catalog_page_sk#39, cs_item_sk#40, cs_promo_sk#41, cs_ext_sales_price#43, cs_net_profit#44, cs_sold_date_sk#45, cr_return_amount#49, cr_net_loss#50, d_date_sk#52]

-(unknown) Scan parquet spark_catalog.default.catalog_page
+(56) Scan parquet spark_catalog.default.catalog_page
 Output [2]: [cp_catalog_page_sk#53, cp_catalog_page_id#54]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/catalog_page]
@@ -447,7 +447,7 @@ Functions [3]: [sum(UnscaledValue(cs_ext_sales_price#43)), sum(coalesce(cast(cr_
 Aggregate Attributes [3]: [sum(UnscaledValue(cs_ext_sales_price#43))#67, sum(coalesce(cast(cr_return_amount#49 as decimal(12,2)), 0.00))#68, sum((cs_net_profit#44 - coalesce(cast(cr_net_loss#50 as decimal(12,2)), 0.00)))#69]
 Results [5]: [catalog channel AS channel#70, concat(catalog_page, cp_catalog_page_id#54) AS id#71, MakeDecimal(sum(UnscaledValue(cs_ext_sales_price#43))#67,17,2) AS sales#72, sum(coalesce(cast(cr_return_amount#49 as decimal(12,2)), 0.00))#68 AS returns#73, sum((cs_net_profit#44 - coalesce(cast(cr_net_loss#50 as decimal(12,2)), 0.00)))#69 AS profit#74]

-(unknown) Scan parquet spark_catalog.default.web_sales
+(71) Scan parquet spark_catalog.default.web_sales
 Output [7]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_order_number#78, ws_ext_sales_price#79, ws_net_profit#80, ws_sold_date_sk#81]
 Batched: true
 Location: InMemoryFileIndex []
@@ -470,7 +470,7 @@ Arguments: hashpartitioning(ws_item_sk#75, ws_order_number#78, 5), ENSURE_REQUIR
 Input [7]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_order_number#78, ws_ext_sales_price#79, ws_net_profit#80, ws_sold_date_sk#81]
 Arguments: [ws_item_sk#75 ASC NULLS FIRST, ws_order_number#78 ASC NULLS FIRST], false, 0

-(unknown) Scan parquet spark_catalog.default.web_returns
+(76) Scan parquet spark_catalog.default.web_returns
 Output [5]: [wr_item_sk#83, wr_order_number#84, wr_return_amt#85, wr_net_loss#86, wr_returned_date_sk#87]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/web_returns]
@@ -519,7 +519,7 @@ Join condition: None
 Output [7]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_ext_sales_price#79, ws_net_profit#80, wr_return_amt#85, wr_net_loss#86]
 Input [9]: [ws_item_sk#75, ws_web_site_sk#76, ws_promo_sk#77, ws_ext_sales_price#79, ws_net_profit#80, ws_sold_date_sk#81, wr_return_amt#85, wr_net_loss#86, d_date_sk#88]

-(unknown) Scan parquet spark_catalog.default.web_site
+(87) Scan parquet spark_catalog.default.web_site
 Output [2]: [web_site_sk#89, web_site_id#90]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/web_site]
@@ -701,7 +701,7 @@ BroadcastExchange (125)
 +- CometScan parquet spark_catalog.default.date_dim (121)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(121) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#14, d_date#171]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q86a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q86a/explain.txt
index 610ae8967..ff41ac064 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q86a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q86a/explain.txt
@@ -35,7 +35,7 @@ TakeOrderedAndProject (34)
 +- ReusedExchange (21)


-(unknown) Scan parquet spark_catalog.default.web_sales
+(1) Scan parquet spark_catalog.default.web_sales
 Output [3]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -63,7 +63,7 @@ Join condition: None
 Output [2]: [ws_item_sk#1, ws_net_paid#2]
 Input [4]: [ws_item_sk#1, ws_net_paid#2, ws_sold_date_sk#3, d_date_sk#5]

-(unknown) Scan parquet spark_catalog.default.item
+(7) Scan parquet spark_catalog.default.item
 Output [3]: [i_item_sk#6, i_class#7, i_category#8]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -215,7 +215,7 @@ BroadcastExchange (39)
 +- CometScan parquet spark_catalog.default.date_dim (35)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(35) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#5, d_month_seq#42]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt
index 7fa138d5e..f77b7ec93 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7/q98/explain.txt
@@ -22,7 +22,7 @@
 +- ReusedExchange (10)


-(unknown) Scan parquet spark_catalog.default.store_sales
+(1) Scan parquet spark_catalog.default.store_sales
 Output [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3]
 Batched: true
 Location: InMemoryFileIndex []
@@ -37,7 +37,7 @@ Condition : isnotnull(ss_item_sk#1)
 (3) ColumnarToRow [codegen id : 3]
 Input [3]: [ss_item_sk#1, ss_ext_sales_price#2, ss_sold_date_sk#3]

-(unknown) Scan parquet spark_catalog.default.item
+(4) Scan parquet spark_catalog.default.item
 Output [6]: [i_item_sk#5, i_item_id#6, i_item_desc#7, i_current_price#8, i_class#9, i_category#10]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/item]
@@ -130,7 +130,7 @@ BroadcastExchange (26)
 +- CometScan parquet spark_catalog.default.date_dim (22)


-(unknown) Scan parquet spark_catalog.default.date_dim
+(22) Scan parquet spark_catalog.default.date_dim
 Output [2]: [d_date_sk#11, d_date#19]
 Batched: true
 Location [not included in comparison]/{warehouse_dir}/date_dim]