diff --git a/src/query/ast/src/ast/format/ast_format.rs b/src/query/ast/src/ast/format/ast_format.rs index f7a682e17f82..4cfbbe36c80c 100644 --- a/src/query/ast/src/ast/format/ast_format.rs +++ b/src/query/ast/src/ast/format/ast_format.rs @@ -18,6 +18,7 @@ use databend_common_exception::Result; use databend_common_exception::Span; use databend_common_meta_app::principal::PrincipalIdentity; use databend_common_meta_app::principal::UserIdentity; +use itertools::Itertools; use crate::ast::*; @@ -699,23 +700,49 @@ impl<'ast> Visitor<'ast> for AstFormatVisitor { self.children.push(node); } - fn visit_explain(&mut self, kind: &'ast ExplainKind, query: &'ast Statement) { + fn visit_explain( + &mut self, + kind: &'ast ExplainKind, + options: &'ast [ExplainOption], + query: &'ast Statement, + ) { self.visit_statement(query); let child = self.children.pop().unwrap(); - let name = format!("Explain{}", match kind { - ExplainKind::Ast(_) => "Ast", - ExplainKind::Syntax(_) => "Syntax", - ExplainKind::Graph => "Graph", - ExplainKind::Pipeline => "Pipeline", - ExplainKind::Fragments => "Fragments", - ExplainKind::Raw => "Raw", - ExplainKind::Optimized => "Optimized", - ExplainKind::Plan => "Plan", - ExplainKind::Memo(_) => "Memo", - ExplainKind::JOIN => "JOIN", - ExplainKind::AnalyzePlan => "Analyze", - }); + let name = format!( + "Explain{}{}", + match kind { + ExplainKind::Ast(_) => "Ast", + ExplainKind::Syntax(_) => "Syntax", + ExplainKind::Graph => "Graph", + ExplainKind::Pipeline => "Pipeline", + ExplainKind::Fragments => "Fragments", + ExplainKind::Raw => "Raw", + ExplainKind::Optimized => "Optimized", + ExplainKind::Plan => "Plan", + ExplainKind::Memo(_) => "Memo", + ExplainKind::Join => "Join", + ExplainKind::AnalyzePlan => "Analyze", + }, + if options.is_empty() { + "".to_string() + } else { + format!( + "({})", + options + .iter() + .flat_map(|opt| { + match opt { + ExplainOption::Verbose(true) => Some("Verbose"), + ExplainOption::Logical(true) => Some("Logical"), + ExplainOption::Optimized(true) => Some("Optimized"), + _ => None, + } + }) + .join(", ") + ) + } + ); let format_ctx = AstFormatContext::with_children(name, 1); let node = FormatTreeNode::with_children(format_ctx, vec![child]); self.children.push(node); diff --git a/src/query/ast/src/ast/statements/explain.rs b/src/query/ast/src/ast/statements/explain.rs index 624806980c33..91c7ffd2af47 100644 --- a/src/query/ast/src/ast/statements/explain.rs +++ b/src/query/ast/src/ast/statements/explain.rs @@ -22,12 +22,23 @@ pub enum ExplainKind { Graph, Pipeline, Fragments, + + // `EXPLAIN RAW` and `EXPLAIN OPTIMIZED` will be deprecated in the future, + // use explain options instead Raw, Optimized, + Plan, - JOIN, + Join, // Explain analyze plan AnalyzePlan, } + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ExplainOption { + Verbose(bool), + Logical(bool), + Optimized(bool), +} diff --git a/src/query/ast/src/ast/statements/statement.rs b/src/query/ast/src/ast/statements/statement.rs index c5ea14adcaad..e76e961902da 100644 --- a/src/query/ast/src/ast/statements/statement.rs +++ b/src/query/ast/src/ast/statements/statement.rs @@ -19,6 +19,7 @@ use databend_common_meta_app::principal::FileFormatOptionsAst; use databend_common_meta_app::principal::PrincipalIdentity; use databend_common_meta_app::principal::UserIdentity; use databend_common_meta_app::schema::CreateOption; +use itertools::Itertools; use super::merge_into::MergeIntoStmt; use super::*; @@ -36,6 +37,7 @@ pub enum Statement { Query(Box), Explain { kind: ExplainKind, + options: Vec, 
query: Box, }, ExplainAnalyze { @@ -334,8 +336,34 @@ impl Statement { impl Display for Statement { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { - Statement::Explain { kind, query } => { + Statement::Explain { + options, + kind, + query, + } => { write!(f, "EXPLAIN")?; + if !options.is_empty() { + write!( + f, + "({})", + options + .iter() + .map(|opt| { + match opt { + ExplainOption::Verbose(v) => { + format!("VERBOSE = {}", v) + } + ExplainOption::Logical(v) => { + format!("LOGICAL = {}", v) + } + ExplainOption::Optimized(v) => { + format!("OPTIMIZED = {}", v) + } + } + }) + .join(", ") + )?; + } match *kind { ExplainKind::Ast(_) => write!(f, " AST")?, ExplainKind::Syntax(_) => write!(f, " SYNTAX")?, @@ -346,7 +374,7 @@ impl Display for Statement { ExplainKind::Optimized => write!(f, " Optimized")?, ExplainKind::Plan => (), ExplainKind::AnalyzePlan => write!(f, " ANALYZE")?, - ExplainKind::JOIN => write!(f, " JOIN")?, + ExplainKind::Join => write!(f, " JOIN")?, ExplainKind::Memo(_) => write!(f, " MEMO")?, } write!(f, " {query}")?; diff --git a/src/query/ast/src/ast/visitors/visitor.rs b/src/query/ast/src/ast/visitors/visitor.rs index ec4c8731ab33..408f3ef0088f 100644 --- a/src/query/ast/src/ast/visitors/visitor.rs +++ b/src/query/ast/src/ast/visitors/visitor.rs @@ -380,7 +380,13 @@ pub trait Visitor<'ast>: Sized { walk_query(self, query); } - fn visit_explain(&mut self, _kind: &'ast ExplainKind, _query: &'ast Statement) {} + fn visit_explain( + &mut self, + _kind: &'ast ExplainKind, + _options: &'ast [ExplainOption], + _query: &'ast Statement, + ) { + } fn visit_copy_into_table(&mut self, copy: &'ast CopyIntoTableStmt) { if let CopyIntoTableSource::Query(query) = ©.src { diff --git a/src/query/ast/src/ast/visitors/visitor_mut.rs b/src/query/ast/src/ast/visitors/visitor_mut.rs index 0409ffecac42..de73656e2aac 100644 --- a/src/query/ast/src/ast/visitors/visitor_mut.rs +++ b/src/query/ast/src/ast/visitors/visitor_mut.rs @@ -394,7 +394,12 @@ pub trait VisitorMut: Sized { walk_query_mut(self, query); } - fn visit_explain(&mut self, _kind: &mut ExplainKind, stmt: &mut Statement) { + fn visit_explain( + &mut self, + _kind: &mut ExplainKind, + _options: &mut [ExplainOption], + stmt: &mut Statement, + ) { walk_statement_mut(self, stmt); } diff --git a/src/query/ast/src/ast/visitors/walk.rs b/src/query/ast/src/ast/visitors/walk.rs index 0755eca97077..90be8bd8f771 100644 --- a/src/query/ast/src/ast/visitors/walk.rs +++ b/src/query/ast/src/ast/visitors/walk.rs @@ -367,7 +367,11 @@ pub fn walk_window_definition<'a, V: Visitor<'a>>( pub fn walk_statement<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Statement) { match statement { - Statement::Explain { kind, query } => visitor.visit_explain(kind, query), + Statement::Explain { + kind, + options, + query, + } => visitor.visit_explain(kind, options, query), Statement::ExplainAnalyze { query } => visitor.visit_statement(query), Statement::Query(query) => visitor.visit_query(query), Statement::Insert(insert) => visitor.visit_insert(insert), diff --git a/src/query/ast/src/ast/visitors/walk_mut.rs b/src/query/ast/src/ast/visitors/walk_mut.rs index a5afcc518519..a3b2f2e09a1c 100644 --- a/src/query/ast/src/ast/visitors/walk_mut.rs +++ b/src/query/ast/src/ast/visitors/walk_mut.rs @@ -362,7 +362,11 @@ pub fn walk_cte_mut(visitor: &mut V, cte: &mut CTE) { pub fn walk_statement_mut(visitor: &mut V, statement: &mut Statement) { match statement { - Statement::Explain { kind, query } => visitor.visit_explain(kind, &mut *query), + 
Statement::Explain { + kind, + options, + query, + } => visitor.visit_explain(kind, options, &mut *query), Statement::ExplainAnalyze { query } => visitor.visit_statement(&mut *query), Statement::Query(query) => visitor.visit_query(&mut *query), Statement::Insert(insert) => visitor.visit_insert(insert), diff --git a/src/query/ast/src/parser/statement.rs b/src/query/ast/src/parser/statement.rs index 528c2cb96065..8e27d9e9082e 100644 --- a/src/query/ast/src/parser/statement.rs +++ b/src/query/ast/src/parser/statement.rs @@ -63,9 +63,9 @@ pub enum CreateDatabaseOption { pub fn statement(i: Input) -> IResult { let explain = map_res( rule! { - EXPLAIN ~ ( AST | SYNTAX | PIPELINE | JOIN | GRAPH | FRAGMENTS | RAW | OPTIMIZED | MEMO )? ~ #statement + EXPLAIN ~ ( "(" ~ #comma_separated_list1(explain_option) ~ ")" )? ~ ( AST | SYNTAX | PIPELINE | JOIN | GRAPH | FRAGMENTS | RAW | OPTIMIZED | MEMO )? ~ #statement }, - |(_, opt_kind, statement)| { + |(_, options, opt_kind, statement)| { Ok(Statement::Explain { kind: match opt_kind.map(|token| token.kind) { Some(TokenKind::AST) => { @@ -83,7 +83,7 @@ pub fn statement(i: Input) -> IResult { ExplainKind::Syntax(pretty_stmt) } Some(TokenKind::PIPELINE) => ExplainKind::Pipeline, - Some(TokenKind::JOIN) => ExplainKind::JOIN, + Some(TokenKind::JOIN) => ExplainKind::Join, Some(TokenKind::GRAPH) => ExplainKind::Graph, Some(TokenKind::FRAGMENTS) => ExplainKind::Fragments, Some(TokenKind::RAW) => ExplainKind::Raw, @@ -92,6 +92,7 @@ pub fn statement(i: Input) -> IResult { None => ExplainKind::Plan, _ => unreachable!(), }, + options: options.as_ref().map_or(vec![], |(_, opts, _)| opts.clone()), query: Box::new(statement.stmt), }) }, @@ -3666,3 +3667,17 @@ pub fn alter_password_action(i: Input) -> IResult { | #unset_options )(i) } + +pub fn explain_option(i: Input) -> IResult { + map( + rule! 
{ + VERBOSE | LOGICAL | OPTIMIZED + }, + |opt| match &opt.kind { + VERBOSE => ExplainOption::Verbose(true), + LOGICAL => ExplainOption::Logical(true), + OPTIMIZED => ExplainOption::Optimized(true), + _ => unreachable!(), + }, + )(i) +} diff --git a/src/query/ast/src/parser/token.rs b/src/query/ast/src/parser/token.rs index 23cdf98b18a5..233d3b6e01e4 100644 --- a/src/query/ast/src/parser/token.rs +++ b/src/query/ast/src/parser/token.rs @@ -673,6 +673,8 @@ pub enum TokenKind { LOCATION_PREFIX, #[token("LOCKS", ignore(ascii_case))] LOCKS, + #[token("LOGICAL", ignore(ascii_case))] + LOGICAL, #[token("ACCOUNT", ignore(ascii_case))] ACCOUNT, #[token("SECONDARY", ignore(ascii_case))] @@ -1070,6 +1072,8 @@ pub enum TokenKind { VARCHAR, #[token("VARIANT", ignore(ascii_case))] VARIANT, + #[token("VERBOSE", ignore(ascii_case))] + VERBOSE, #[token("VIEW", ignore(ascii_case))] VIEW, #[token("VIRTUAL", ignore(ascii_case))] diff --git a/src/query/ast/tests/it/parser.rs b/src/query/ast/tests/it/parser.rs index ac6837ee934c..6d2e4eeee490 100644 --- a/src/query/ast/tests/it/parser.rs +++ b/src/query/ast/tests/it/parser.rs @@ -97,6 +97,7 @@ fn test_statement() { r#"show create table a.b format TabSeparatedWithNamesAndTypes;"#, r#"explain pipeline select a from b;"#, r#"explain pipeline select a from t1 ignore_result;"#, + r#"explain(verbose, logical, optimized) select * from t where a = 1"#, r#"describe a;"#, r#"describe a format TabSeparatedWithNamesAndTypes;"#, r#"CREATE AGGREGATING INDEX idx1 AS SELECT SUM(a), b FROM t1 WHERE b > 3 GROUP BY b;"#, diff --git a/src/query/ast/tests/it/testdata/statement.txt b/src/query/ast/tests/it/testdata/statement.txt index 37f0a6303580..798499a6ae65 100644 --- a/src/query/ast/tests/it/testdata/statement.txt +++ b/src/query/ast/tests/it/testdata/statement.txt @@ -498,6 +498,7 @@ EXPLAIN PIPELINE SELECT a FROM b ---------- AST ------------ Explain { kind: Pipeline, + options: [], query: Query( Query { span: Some( @@ -576,6 +577,7 @@ EXPLAIN PIPELINE SELECT a FROM t1 ---------- AST ------------ Explain { kind: Pipeline, + options: [], query: Query( Query { span: Some( @@ -647,6 +649,117 @@ Explain { } +---------- Input ---------- +explain(verbose, logical, optimized) select * from t where a = 1 +---------- Output --------- +EXPLAIN(VERBOSE = true, LOGICAL = true, OPTIMIZED = true) SELECT * FROM t WHERE (a = 1) +---------- AST ------------ +Explain { + kind: Plan, + options: [ + Verbose( + true, + ), + Logical( + true, + ), + Optimized( + true, + ), + ], + query: Query( + Query { + span: Some( + 37..64, + ), + with: None, + body: Select( + SelectStmt { + span: Some( + 37..64, + ), + hints: None, + distinct: false, + select_list: [ + StarColumns { + qualified: [ + Star( + Some( + 44..45, + ), + ), + ], + column_filter: None, + }, + ], + from: [ + Table { + span: Some( + 51..52, + ), + catalog: None, + database: None, + table: Identifier { + span: Some( + 51..52, + ), + name: "t", + quote: None, + }, + alias: None, + travel_point: None, + since_point: None, + pivot: None, + unpivot: None, + }, + ], + selection: Some( + BinaryOp { + span: Some( + 61..62, + ), + op: Eq, + left: ColumnRef { + span: Some( + 59..60, + ), + database: None, + table: None, + column: Name( + Identifier { + span: Some( + 59..60, + ), + name: "a", + quote: None, + }, + ), + }, + right: Literal { + span: Some( + 63..64, + ), + lit: UInt64( + 1, + ), + }, + }, + ), + group_by: None, + having: None, + window_list: None, + qualify: None, + }, + ), + order_by: [], + limit: [], + offset: None, + ignore_result: 
false, + }, + ), +} + + ---------- Input ---------- describe a; ---------- Output --------- diff --git a/src/query/service/src/interpreters/access/management_mode_access.rs b/src/query/service/src/interpreters/access/management_mode_access.rs index 5e1c23e9b217..e2278221856e 100644 --- a/src/query/service/src/interpreters/access/management_mode_access.rs +++ b/src/query/service/src/interpreters/access/management_mode_access.rs @@ -128,7 +128,7 @@ impl AccessChecker for ManagementModeAccess { if !ok { return Err(ErrorCode::ManagementModePermissionDenied(format!( "Management Mode Error: Access denied for operation:{:?} in management-mode", - plan.format_indent() + plan.format_indent(false)? ))); } }; diff --git a/src/query/service/src/interpreters/interpreter_explain.rs b/src/query/service/src/interpreters/interpreter_explain.rs index eb685b0bca35..133455d68ce3 100644 --- a/src/query/service/src/interpreters/interpreter_explain.rs +++ b/src/query/service/src/interpreters/interpreter_explain.rs @@ -24,6 +24,7 @@ use databend_common_expression::types::StringType; use databend_common_expression::DataBlock; use databend_common_expression::FromData; use databend_common_pipeline_core::processors::PlanProfile; +use databend_common_sql::binder::ExplainConfig; use databend_common_sql::optimizer::ColumnSet; use databend_common_sql::plans::UpdatePlan; use databend_common_sql::BindContext; @@ -51,6 +52,7 @@ use crate::sql::plans::Plan; pub struct ExplainInterpreter { ctx: Arc, + config: ExplainConfig, kind: ExplainKind, plan: Plan, } @@ -68,8 +70,8 @@ impl Interpreter for ExplainInterpreter { #[async_backtrace::framed] async fn execute2(&self) -> Result { let blocks = match &self.kind { - ExplainKind::Raw => self.explain_plan(&self.plan)?, - ExplainKind::Optimized => self.explain_plan(&self.plan)?, + ExplainKind::Raw | ExplainKind::Optimized => self.explain_plan(&self.plan)?, + ExplainKind::Plan if self.config.logical => self.explain_plan(&self.plan)?, ExplainKind::Plan => match &self.plan { Plan::Query { s_expr, @@ -123,7 +125,7 @@ impl Interpreter for ExplainInterpreter { _ => self.explain_plan(&self.plan)?, }, - ExplainKind::JOIN => match &self.plan { + ExplainKind::Join => match &self.plan { Plan::Query { s_expr, metadata, @@ -225,12 +227,22 @@ impl Interpreter for ExplainInterpreter { } impl ExplainInterpreter { - pub fn try_create(ctx: Arc, plan: Plan, kind: ExplainKind) -> Result { - Ok(ExplainInterpreter { ctx, plan, kind }) + pub fn try_create( + ctx: Arc, + plan: Plan, + kind: ExplainKind, + config: ExplainConfig, + ) -> Result { + Ok(ExplainInterpreter { + ctx, + plan, + kind, + config, + }) } pub fn explain_plan(&self, plan: &Plan) -> Result> { - let result = plan.format_indent()?; + let result = plan.format_indent(self.config.verbose)?; let line_split_result: Vec<&str> = result.lines().collect(); let formatted_plan = StringType::from_data(line_split_result); Ok(vec![DataBlock::new_from_columns(vec![formatted_plan])]) diff --git a/src/query/service/src/interpreters/interpreter_factory.rs b/src/query/service/src/interpreters/interpreter_factory.rs index 4636c15ac74e..6aedd7187b67 100644 --- a/src/query/service/src/interpreters/interpreter_factory.rs +++ b/src/query/service/src/interpreters/interpreter_factory.rs @@ -16,6 +16,7 @@ use std::sync::Arc; use databend_common_ast::ast::ExplainKind; use databend_common_exception::Result; +use databend_common_sql::binder::ExplainConfig; use log::error; use super::interpreter_catalog_create::CreateCatalogInterpreter; @@ -97,25 +98,29 @@ impl 
InterpreterFactory { formatted_ast.clone(), *ignore_result, )?)), - Plan::Explain { kind, plan } => Ok(Arc::new(ExplainInterpreter::try_create( + Plan::Explain { kind, config, plan } => Ok(Arc::new(ExplainInterpreter::try_create( ctx, *plan.clone(), kind.clone(), + config.clone(), )?)), Plan::ExplainAst { formatted_string } => Ok(Arc::new(ExplainInterpreter::try_create( ctx, plan.clone(), ExplainKind::Ast(formatted_string.clone()), + ExplainConfig::default(), )?)), Plan::ExplainSyntax { formatted_sql } => Ok(Arc::new(ExplainInterpreter::try_create( ctx, plan.clone(), ExplainKind::Syntax(formatted_sql.clone()), + ExplainConfig::default(), )?)), Plan::ExplainAnalyze { plan } => Ok(Arc::new(ExplainInterpreter::try_create( ctx, *plan.clone(), ExplainKind::AnalyzePlan, + ExplainConfig::default(), )?)), Plan::CopyIntoTable(copy_plan) => Ok(Arc::new(CopyIntoTableInterpreter::try_create( diff --git a/src/query/sql/src/planner/binder/binder.rs b/src/query/sql/src/planner/binder/binder.rs index 72c2e121475e..b875f66ab9d6 100644 --- a/src/query/sql/src/planner/binder/binder.rs +++ b/src/query/sql/src/planner/binder/binder.rs @@ -18,7 +18,6 @@ use std::sync::Arc; use chrono_tz::Tz; use databend_common_ast::ast::format_statement; -use databend_common_ast::ast::ExplainKind; use databend_common_ast::ast::Hint; use databend_common_ast::ast::Identifier; use databend_common_ast::ast::Statement; @@ -217,12 +216,8 @@ impl<'a> Binder { } } - Statement::Explain { query, kind } => { - match kind { - ExplainKind::Ast(formatted_stmt) => Plan::ExplainAst { formatted_string: formatted_stmt.clone() }, - ExplainKind::Syntax(formatted_sql) => Plan::ExplainSyntax { formatted_sql: formatted_sql.clone() }, - _ => Plan::Explain { kind: kind.clone(), plan: Box::new(self.bind_statement(bind_context, query).await?) }, - } + Statement::Explain { query, options, kind } => { + self.bind_explain(bind_context, kind, options, query).await? } Statement::ExplainAnalyze { query } => { diff --git a/src/query/sql/src/planner/binder/ddl/index.rs b/src/query/sql/src/planner/binder/ddl/index.rs index ba0763616818..fd413b47e352 100644 --- a/src/query/sql/src/planner/binder/ddl/index.rs +++ b/src/query/sql/src/planner/binder/ddl/index.rs @@ -62,7 +62,7 @@ impl Binder { Plan::Query { metadata, .. } => { self.do_bind_query_index(bind_context, metadata).await?; } - Plan::Explain { kind, plan } + Plan::Explain { kind, plan, .. } if matches!(kind, ExplainKind::Plan) && matches!(**plan, Plan::Query { .. }) => { match **plan { diff --git a/src/query/sql/src/planner/binder/explain.rs b/src/query/sql/src/planner/binder/explain.rs new file mode 100644 index 000000000000..d5bd97c3d056 --- /dev/null +++ b/src/query/sql/src/planner/binder/explain.rs @@ -0,0 +1,152 @@ +// Copyright 2021 Datafuse Labs +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use databend_common_ast::ast::ExplainKind; +use databend_common_ast::ast::ExplainOption; +use databend_common_ast::ast::Statement; +use databend_common_exception::ErrorCode; +use databend_common_exception::Result; + +use crate::plans::Plan; +use crate::BindContext; +use crate::Binder; + +/// Configuration for the EXPLAIN statement. +#[derive(Debug, Clone, PartialEq, Default)] +pub struct ExplainConfig { + pub verbose: bool, + pub logical: bool, + pub optimized: bool, +} + +struct ExplainConfigBuilder { + verbose: bool, + logical: bool, + optimized: bool, +} + +impl ExplainConfigBuilder { + pub fn new() -> Self { + ExplainConfigBuilder { + verbose: false, + logical: false, + optimized: false, + } + } + + pub fn add_option(mut self, option: &ExplainOption) -> Self { + match option { + ExplainOption::Verbose(v) => self.verbose = *v, + ExplainOption::Logical(v) => self.logical = *v, + ExplainOption::Optimized(v) => { + if *v { + self.logical = true; + } + self.optimized = *v; + } + } + + self + } + + pub fn build(self) -> ExplainConfig { + ExplainConfig { + verbose: self.verbose, + logical: self.logical, + optimized: self.optimized, + } + } +} + +impl Binder { + pub async fn bind_explain( + &mut self, + bind_context: &mut BindContext, + kind: &ExplainKind, + options: &[ExplainOption], + inner: &Statement, + ) -> Result { + let mut builder = ExplainConfigBuilder::new(); + + // Rewrite `EXPLAIN RAW` to `EXPLAIN(LOGICAL)` + if matches!(kind, ExplainKind::Raw) { + builder = builder.add_option(&ExplainOption::Logical(true)); + } + + // Rewrite `EXPLAIN OPTIMIZED` to `EXPLAIN(LOGICAL, OPTIMIZED)` + if matches!(kind, ExplainKind::Optimized) { + builder = builder.add_option(&ExplainOption::Logical(true)); + builder = builder.add_option(&ExplainOption::Optimized(true)); + } + + for option in options { + builder = builder.add_option(option); + } + + let config = builder.build(); + + // Validate the configuration + validate_explain_config(kind, &config)?; + + let plan = match kind { + ExplainKind::Ast(formatted_stmt) => Plan::ExplainAst { + formatted_string: formatted_stmt.clone(), + }, + ExplainKind::Syntax(formatted_sql) => Plan::ExplainSyntax { + formatted_sql: formatted_sql.clone(), + }, + _ => Plan::Explain { + kind: kind.clone(), + config, + plan: Box::new(self.bind_statement(bind_context, inner).await?), + }, + }; + + Ok(plan) + } +} + +fn validate_explain_config(kind: &ExplainKind, config: &ExplainConfig) -> Result<()> { + if !matches!( + kind, + ExplainKind::Plan | ExplainKind::Raw | ExplainKind::Optimized + ) && config.logical + { + return Err(ErrorCode::SyntaxException( + "LOGICAL option is only supported for EXPLAIN SELECT statement".to_string(), + )); + } + + if !matches!( + kind, + ExplainKind::Plan | ExplainKind::Raw | ExplainKind::Optimized + ) && config.optimized + { + return Err(ErrorCode::SyntaxException( + "OPTIMIZED option is only supported for EXPLAIN SELECT statement".to_string(), + )); + } + + if !matches!( + kind, + ExplainKind::Plan | ExplainKind::Raw | ExplainKind::Optimized + ) && config.verbose + { + return Err(ErrorCode::SyntaxException( + "VERBOSE option is only supported for EXPLAIN SELECT statement".to_string(), + )); + } + + Ok(()) +} diff --git a/src/query/sql/src/planner/binder/mod.rs b/src/query/sql/src/planner/binder/mod.rs index b4d26c218e14..1adbc668e949 100644 --- a/src/query/sql/src/planner/binder/mod.rs +++ b/src/query/sql/src/planner/binder/mod.rs @@ -25,6 +25,7 @@ mod copy_into_table; mod ddl; mod delete; mod distinct; +mod explain; mod having; mod insert; 
mod internal_column_factory; @@ -61,6 +62,7 @@ pub use column_binding::ColumnBinding; pub use column_binding::ColumnBindingBuilder; pub use copy_into_table::resolve_file_location; pub use copy_into_table::resolve_stage_location; +pub use explain::ExplainConfig; pub use internal_column_factory::INTERNAL_COLUMN_FACTORY; pub use location::get_storage_params_from_options; pub use location::parse_storage_params_from_uri; diff --git a/src/query/sql/src/planner/format/display.rs b/src/query/sql/src/planner/format/display.rs index 000a3f549c4d..863b212e456b 100644 --- a/src/query/sql/src/planner/format/display.rs +++ b/src/query/sql/src/planner/format/display.rs @@ -13,8 +13,13 @@ // limitations under the License. use databend_common_ast::ast::FormatTreeNode; +use databend_common_exception::Result; +use itertools::Itertools; +use crate::optimizer::RelExpr; +use crate::optimizer::RelationalProperty; use crate::optimizer::SExpr; +use crate::optimizer::StatInfo; use crate::planner::format::display_rel_operator::to_format_tree; use crate::plans::RelOperator; use crate::ColumnEntry; @@ -132,21 +137,104 @@ impl IdHumanizer for Metadata { /// A humanizer for `SExpr`. /// It will use `IdHumanizer` and `OperatorHumanizer` to humanize the `SExpr`. /// The result is a `FormatTreeNode` with the operator and its children. -pub struct TreeHumanizer; - -impl TreeHumanizer { - pub fn humanize_s_expr>( - id_humanizer: &I, - operator_humanizer: &O, - s_expr: &SExpr, - ) -> O::Output { +pub struct TreeHumanizer<'a, I, O> { + id_humanizer: &'a I, + operator_humanizer: &'a O, + verbose: bool, +} + +impl<'a, I: IdHumanizer, O: OperatorHumanizer> + TreeHumanizer<'a, I, O> +{ + pub fn new(id_humanizer: &'a I, operator_humanizer: &'a O, verbose: bool) -> Self { + TreeHumanizer { + id_humanizer, + operator_humanizer, + verbose, + } + } + + pub fn humanize_s_expr(&self, s_expr: &SExpr) -> Result { let op = s_expr.plan(); - let mut tree = operator_humanizer.humanize_operator(id_humanizer, op); + let mut tree = self + .operator_humanizer + .humanize_operator(self.id_humanizer, op); let children = s_expr .children() - .map(|v| Self::humanize_s_expr(id_humanizer, operator_humanizer, v)) - .collect::>(); + .map(|s_expr| self.humanize_s_expr(s_expr)) + .collect::>>()?; + + if self.verbose { + let rel_expr = RelExpr::with_s_expr(s_expr); + let prop = rel_expr.derive_relational_prop()?; + let stat = rel_expr.derive_cardinality()?; + let properties = self.humanize_property(&prop); + let stats = self.humanize_stat(&stat)?; + tree.children.extend(properties); + tree.children.extend(stats); + } + tree.children.extend(children); - tree + Ok(tree) + } + + fn humanize_property(&self, prop: &RelationalProperty) -> Vec { + let output_columns = prop + .output_columns + .iter() + .map(|idx| self.id_humanizer.humanize_column_id(*idx)) + .sorted() + .collect::>(); + + let outer_columns = prop + .outer_columns + .iter() + .map(|idx| self.id_humanizer.humanize_column_id(*idx)) + .sorted() + .collect::>(); + + let used_columns = prop + .used_columns + .iter() + .map(|idx| self.id_humanizer.humanize_column_id(*idx)) + .sorted() + .collect::>(); + + vec![ + FormatTreeNode::new(format!("output columns: [{}]", output_columns.join(", "))), + FormatTreeNode::new(format!("outer columns: [{}]", outer_columns.join(", "))), + FormatTreeNode::new(format!("used columns: [{}]", used_columns.join(", "))), + ] + } + + fn humanize_stat(&self, stat: &StatInfo) -> Result> { + let cardinality = format!("{:.3}", stat.cardinality); + + let precise_cardinality = if let 
Some(card) = stat.statistics.precise_cardinality { + format!("{}", card) + } else { + "N/A".to_string() + }; + + let column_stats = stat + .statistics + .column_stats + .iter() + .map(|(column, hist)| { + let column = self.id_humanizer.humanize_column_id(*column); + let hist = format!( + "{{ min: {}, max: {}, ndv: {}, null count: {} }}", + hist.min, hist.max, hist.ndv, hist.null_count + ); + FormatTreeNode::new(format!("{}: {}", column, hist)) + }) + .sorted_by(|a, b| a.payload.cmp(&b.payload)) + .collect::>(); + + Ok(vec![ + FormatTreeNode::new(format!("cardinality: {}", cardinality)), + FormatTreeNode::new(format!("precise cardinality: {}", precise_cardinality)), + FormatTreeNode::with_children("statistics".to_string(), column_stats), + ]) } } diff --git a/src/query/sql/src/planner/format/display_plan.rs b/src/query/sql/src/planner/format/display_plan.rs index 46357138886e..91d839bd6a52 100644 --- a/src/query/sql/src/planner/format/display_plan.rs +++ b/src/query/sql/src/planner/format/display_plan.rs @@ -40,16 +40,16 @@ use crate::ScalarExpr; use crate::Visibility; impl Plan { - pub fn format_indent(&self) -> Result { + pub fn format_indent(&self, verbose: bool) -> Result { match self { Plan::Query { s_expr, metadata, .. } => { let metadata = &*metadata.read(); - s_expr.to_format_tree(metadata).format_pretty() + s_expr.to_format_tree(metadata, verbose)?.format_pretty() } - Plan::Explain { kind, plan } => { - let result = plan.format_indent()?; + Plan::Explain { kind, plan, .. } => { + let result = plan.format_indent(false)?; Ok(format!("{:?}:\n{}", kind, result)) } Plan::ExplainAst { .. } => Ok("ExplainAst".to_string()), @@ -260,7 +260,7 @@ fn format_delete(delete: &DeletePlan) -> Result { SExpr::create_unary(Arc::new(filter), Arc::new(scan_expr)) }; let metadata = &*delete.metadata.read(); - let res = s_expr.to_format_tree(metadata).format_pretty()?; + let res = s_expr.to_format_tree(metadata, false)?.format_pretty()?; Ok(format!("DeletePlan:\n{res}")) } @@ -271,7 +271,7 @@ fn format_create_table(create_table: &CreateTablePlan) -> Result { s_expr, metadata, .. } => { let metadata = &*metadata.read(); - let res = s_expr.to_format_tree(metadata); + let res = s_expr.to_format_tree(metadata, false)?; FormatTreeNode::with_children("CreateTableAsSelect".to_string(), vec![res]) .format_pretty() } @@ -372,7 +372,7 @@ fn format_merge_into(merge_into: &MergeInto) -> Result { } let s_expr = merge_into.input.as_ref(); let metadata = &*merge_into.meta_data.read(); - let input_format_child = s_expr.to_format_tree(metadata); + let input_format_child = s_expr.to_format_tree(metadata, false)?; let all_children = [ vec![distributed_format], vec![target_build_optimization_format], diff --git a/src/query/sql/src/planner/format/display_rel_operator.rs b/src/query/sql/src/planner/format/display_rel_operator.rs index 32177b45a10f..a6b0a27e8ca9 100644 --- a/src/query/sql/src/planner/format/display_rel_operator.rs +++ b/src/query/sql/src/planner/format/display_rel_operator.rs @@ -13,6 +13,7 @@ // limitations under the License. 
use databend_common_ast::ast::FormatTreeNode; +use databend_common_exception::Result; use itertools::Itertools; use crate::optimizer::SExpr; @@ -38,8 +39,11 @@ impl SExpr { pub fn to_format_tree>( &self, id_humanizer: &I, - ) -> FormatTreeNode { - TreeHumanizer::humanize_s_expr(id_humanizer, &DefaultOperatorHumanizer, self) + verbose: bool, + ) -> Result { + let operator_humanizer = DefaultOperatorHumanizer; + let tree_humanizer = TreeHumanizer::new(id_humanizer, &operator_humanizer, verbose); + tree_humanizer.humanize_s_expr(self) } } diff --git a/src/query/sql/src/planner/optimizer/optimizer.rs b/src/query/sql/src/planner/optimizer/optimizer.rs index a9f3aa502c09..dc3603691317 100644 --- a/src/query/sql/src/planner/optimizer/optimizer.rs +++ b/src/query/sql/src/planner/optimizer/optimizer.rs @@ -176,14 +176,15 @@ pub fn optimize(opt_ctx: OptimizerContext, plan: Plan) -> Result { formatted_ast, ignore_result, }), - Plan::Explain { kind, plan } => match kind { - ExplainKind::Raw | ExplainKind::Ast(_) | ExplainKind::Syntax(_) => { - Ok(Plan::Explain { kind, plan }) + Plan::Explain { kind, config, plan } => match kind { + ExplainKind::Ast(_) | ExplainKind::Syntax(_) => { + Ok(Plan::Explain { config, kind, plan }) } ExplainKind::Memo(_) => { if let box Plan::Query { ref s_expr, .. } = plan { let memo = get_optimized_memo(opt_ctx, *s_expr.clone())?; Ok(Plan::Explain { + config, kind: ExplainKind::Memo(display_memo(&memo)?), plan, }) @@ -193,10 +194,18 @@ pub fn optimize(opt_ctx: OptimizerContext, plan: Plan) -> Result { )) } } - _ => Ok(Plan::Explain { - kind, - plan: Box::new(optimize(opt_ctx, *plan)?), - }), + _ => { + if config.optimized || !config.logical { + let optimized_plan = optimize(opt_ctx.clone(), *plan)?; + Ok(Plan::Explain { + kind, + config, + plan: Box::new(optimized_plan), + }) + } else { + Ok(Plan::Explain { kind, config, plan }) + } + } }, Plan::ExplainAnalyze { plan } => Ok(Plan::ExplainAnalyze { plan: Box::new(optimize(opt_ctx, *plan)?), diff --git a/src/query/sql/src/planner/plans/plan.rs b/src/query/sql/src/planner/plans/plan.rs index b4590ac7f33b..71b704b26e69 100644 --- a/src/query/sql/src/planner/plans/plan.rs +++ b/src/query/sql/src/planner/plans/plan.rs @@ -25,6 +25,7 @@ use databend_common_expression::DataSchemaRef; use databend_common_expression::DataSchemaRefExt; use super::SetSecondaryRolesPlan; +use crate::binder::ExplainConfig; use crate::optimizer::SExpr; use crate::plans::copy_into_location::CopyIntoLocationPlan; use crate::plans::AddTableColumnPlan; @@ -154,6 +155,7 @@ pub enum Plan { Explain { kind: ExplainKind, + config: ExplainConfig, plan: Box, }, ExplainAst { diff --git a/tests/sqllogictests/suites/mode/standalone/explain/explain_verbose.test b/tests/sqllogictests/suites/mode/standalone/explain/explain_verbose.test new file mode 100644 index 000000000000..bbb952a36631 --- /dev/null +++ b/tests/sqllogictests/suites/mode/standalone/explain/explain_verbose.test @@ -0,0 +1,297 @@ +statement ok +drop database if exists testdb + +statement ok +create database testdb + +statement ok +use testdb + +statement ok +create table t(a int, b int) + +statement ok +insert into t select number, number + 1 from numbers(1000) + +query T +explain(verbose, logical) select * from t where a = 1 +---- +EvalScalar +├── scalars: [t.a (#0) AS (#0), t.b (#1) AS (#1)] +├── output columns: [testdb.t.a, testdb.t.b] +├── outer columns: [] +├── used columns: [testdb.t.a, testdb.t.b] +├── cardinality: 1.000 +├── precise cardinality: N/A +├── statistics +│ ├── testdb.t.a: { min: 1, max: 
1, ndv: 1, null count: 0 } +│ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } +└── Filter + ├── filters: [eq(t.a (#0), 1)] + ├── output columns: [testdb.t.a, testdb.t.b] + ├── outer columns: [] + ├── used columns: [testdb.t.a, testdb.t.b] + ├── cardinality: 1.000 + ├── precise cardinality: N/A + ├── statistics + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + └── Scan + ├── table: testdb.t + ├── filters: [] + ├── order by: [] + ├── limit: NONE + ├── output columns: [testdb.t.a, testdb.t.b] + ├── outer columns: [] + ├── used columns: [testdb.t.a, testdb.t.b] + ├── cardinality: 1000.000 + ├── precise cardinality: 1000 + └── statistics + ├── testdb.t.a: { min: 0, max: 999, ndv: 1000, null count: 0 } + └── testdb.t.b: { min: 1, max: 1000, ndv: 1000, null count: 0 } + +query T +explain(verbose, logical, optimized) select * from t where a = 1 +---- +EvalScalar +├── scalars: [t.a (#0) AS (#0), t.b (#1) AS (#1)] +├── output columns: [testdb.t.a, testdb.t.b] +├── outer columns: [] +├── used columns: [testdb.t.a, testdb.t.b] +├── cardinality: 1.000 +├── precise cardinality: N/A +├── statistics +│ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } +│ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } +└── Filter + ├── filters: [eq(t.a (#0), 1)] + ├── output columns: [testdb.t.a, testdb.t.b] + ├── outer columns: [] + ├── used columns: [testdb.t.a, testdb.t.b] + ├── cardinality: 1.000 + ├── precise cardinality: N/A + ├── statistics + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + └── Scan + ├── table: testdb.t + ├── filters: [eq(t.a (#0), 1)] + ├── order by: [] + ├── limit: NONE + ├── output columns: [testdb.t.a, testdb.t.b] + ├── outer columns: [] + ├── used columns: [testdb.t.a, testdb.t.b] + ├── cardinality: 1000.000 + ├── precise cardinality: 1000 + └── statistics + ├── testdb.t.a: { min: 0, max: 999, ndv: 1000, null count: 0 } + └── testdb.t.b: { min: 1, max: 1000, ndv: 1000, null count: 0 } + +query T +explain(verbose, logical, optimized) select * from t, t t1, t t2, t t3, t t4 +where t.a = 1 and t1.a = 1 and t2.a = 1 and t3.a = 1 and t4.a = 1 +---- +EvalScalar +├── scalars: [t.a (#0) AS (#0), t.b (#1) AS (#1), t1.a (#2) AS (#2), t1.b (#3) AS (#3), t2.a (#4) AS (#4), t2.b (#5) AS (#5), t3.a (#6) AS (#6), t3.b (#7) AS (#7), t4.a (#8) AS (#8), t4.b (#9) AS (#9)] +├── output columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b] +├── outer columns: [] +├── used columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b] +├── cardinality: 1.000 +├── precise cardinality: N/A +├── statistics +│ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } +│ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } +│ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } +│ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } +│ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } +│ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } +│ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } +│ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } +│ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } +│ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } +└── Join(Cross) + ├── build keys: [] + ├── probe keys: [] + ├── other 
filters: [] + ├── output columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b] + ├── outer columns: [] + ├── used columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b] + ├── cardinality: 1.000 + ├── precise cardinality: N/A + ├── statistics + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + ├── Join(Cross) + │ ├── build keys: [] + │ ├── probe keys: [] + │ ├── other filters: [] + │ ├── output columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b] + │ ├── outer columns: [] + │ ├── used columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b, testdb.t.b] + │ ├── cardinality: 1.000 + │ ├── precise cardinality: N/A + │ ├── statistics + │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ ├── Join(Cross) + │ │ ├── build keys: [] + │ │ ├── probe keys: [] + │ │ ├── other filters: [] + │ │ ├── output columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b] + │ │ ├── outer columns: [] + │ │ ├── used columns: [testdb.t.a, testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b, testdb.t.b] + │ │ ├── cardinality: 1.000 + │ │ ├── precise cardinality: N/A + │ │ ├── statistics + │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ ├── Join(Cross) + │ │ │ ├── build keys: [] + │ │ │ ├── probe keys: [] + │ │ │ ├── other filters: [] + │ │ │ ├── output columns: [testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b] + │ │ │ ├── outer columns: [] + │ │ │ ├── used columns: [testdb.t.a, testdb.t.a, testdb.t.b, testdb.t.b] + │ │ │ ├── cardinality: 1.000 + │ │ │ ├── precise cardinality: N/A + │ │ │ ├── statistics + │ │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ │ ├── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ │ ├── Filter + │ │ │ │ ├── filters: [eq(t.a (#0), 1)] + │ │ │ │ ├── output columns: 
[testdb.t.a, testdb.t.b] + │ │ │ │ ├── outer columns: [] + │ │ │ │ ├── used columns: [testdb.t.a, testdb.t.b] + │ │ │ │ ├── cardinality: 1.000 + │ │ │ │ ├── precise cardinality: N/A + │ │ │ │ ├── statistics + │ │ │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ │ │ └── Scan + │ │ │ │ ├── table: testdb.t + │ │ │ │ ├── filters: [eq(t.a (#0), 1)] + │ │ │ │ ├── order by: [] + │ │ │ │ ├── limit: NONE + │ │ │ │ ├── output columns: [testdb.t.a, testdb.t.b] + │ │ │ │ ├── outer columns: [] + │ │ │ │ ├── used columns: [testdb.t.a, testdb.t.b] + │ │ │ │ ├── cardinality: 1000.000 + │ │ │ │ ├── precise cardinality: 1000 + │ │ │ │ └── statistics + │ │ │ │ ├── testdb.t.a: { min: 0, max: 999, ndv: 1000, null count: 0 } + │ │ │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1000, null count: 0 } + │ │ │ └── Filter + │ │ │ ├── filters: [eq(t1.a (#2), 1)] + │ │ │ ├── output columns: [testdb.t.a, testdb.t.b] + │ │ │ ├── outer columns: [] + │ │ │ ├── used columns: [testdb.t.a, testdb.t.b] + │ │ │ ├── cardinality: 1.000 + │ │ │ ├── precise cardinality: N/A + │ │ │ ├── statistics + │ │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ │ └── Scan + │ │ │ ├── table: testdb.t + │ │ │ ├── filters: [eq(t.a (#2), 1)] + │ │ │ ├── order by: [] + │ │ │ ├── limit: NONE + │ │ │ ├── output columns: [testdb.t.a, testdb.t.b] + │ │ │ ├── outer columns: [] + │ │ │ ├── used columns: [testdb.t.a, testdb.t.b] + │ │ │ ├── cardinality: 1000.000 + │ │ │ ├── precise cardinality: 1000 + │ │ │ └── statistics + │ │ │ ├── testdb.t.a: { min: 0, max: 999, ndv: 1000, null count: 0 } + │ │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1000, null count: 0 } + │ │ └── Filter + │ │ ├── filters: [eq(t2.a (#4), 1)] + │ │ ├── output columns: [testdb.t.a, testdb.t.b] + │ │ ├── outer columns: [] + │ │ ├── used columns: [testdb.t.a, testdb.t.b] + │ │ ├── cardinality: 1.000 + │ │ ├── precise cardinality: N/A + │ │ ├── statistics + │ │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ │ └── Scan + │ │ ├── table: testdb.t + │ │ ├── filters: [eq(t.a (#4), 1)] + │ │ ├── order by: [] + │ │ ├── limit: NONE + │ │ ├── output columns: [testdb.t.a, testdb.t.b] + │ │ ├── outer columns: [] + │ │ ├── used columns: [testdb.t.a, testdb.t.b] + │ │ ├── cardinality: 1000.000 + │ │ ├── precise cardinality: 1000 + │ │ └── statistics + │ │ ├── testdb.t.a: { min: 0, max: 999, ndv: 1000, null count: 0 } + │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1000, null count: 0 } + │ └── Filter + │ ├── filters: [eq(t3.a (#6), 1)] + │ ├── output columns: [testdb.t.a, testdb.t.b] + │ ├── outer columns: [] + │ ├── used columns: [testdb.t.a, testdb.t.b] + │ ├── cardinality: 1.000 + │ ├── precise cardinality: N/A + │ ├── statistics + │ │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + │ └── Scan + │ ├── table: testdb.t + │ ├── filters: [eq(t.a (#6), 1)] + │ ├── order by: [] + │ ├── limit: NONE + │ ├── output columns: [testdb.t.a, testdb.t.b] + │ ├── outer columns: [] + │ ├── used columns: [testdb.t.a, testdb.t.b] + │ ├── cardinality: 1000.000 + │ ├── precise cardinality: 1000 + │ └── statistics + │ ├── testdb.t.a: { min: 0, max: 999, ndv: 1000, null count: 0 } + │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1000, null count: 0 } + └── Filter + ├── filters: 
[eq(t4.a (#8), 1)] + ├── output columns: [testdb.t.a, testdb.t.b] + ├── outer columns: [] + ├── used columns: [testdb.t.a, testdb.t.b] + ├── cardinality: 1.000 + ├── precise cardinality: N/A + ├── statistics + │ ├── testdb.t.a: { min: 1, max: 1, ndv: 1, null count: 0 } + │ └── testdb.t.b: { min: 1, max: 1000, ndv: 1, null count: 0 } + └── Scan + ├── table: testdb.t + ├── filters: [eq(t.a (#8), 1)] + ├── order by: [] + ├── limit: NONE + ├── output columns: [testdb.t.a, testdb.t.b] + ├── outer columns: [] + ├── used columns: [testdb.t.a, testdb.t.b] + ├── cardinality: 1000.000 + ├── precise cardinality: 1000 + └── statistics + ├── testdb.t.a: { min: 0, max: 999, ndv: 1000, null count: 0 } + └── testdb.t.b: { min: 1, max: 1000, ndv: 1000, null count: 0 } + +statement ok +drop database testdb
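
Reviewer note (not part of the patch): the core of this change is the new ExplainConfig and its builder in src/query/sql/src/planner/binder/explain.rs. Below is a minimal, self-contained Rust sketch of that builder's option-to-config mapping — in particular that OPTIMIZED implies LOGICAL, and that the binder rewrites the legacy `EXPLAIN RAW` / `EXPLAIN OPTIMIZED` kinds onto the same options. Names mirror the patch for readability, but the snippet stands alone as an illustration and is not the Databend implementation itself.

// Standalone sketch of the option-to-config mapping, assuming the builder
// semantics shown in the patch above (illustration only).
#[derive(Debug, Default)]
struct ExplainConfig {
    verbose: bool,
    logical: bool,
    optimized: bool,
}

enum ExplainOption {
    Verbose(bool),
    Logical(bool),
    Optimized(bool),
}

#[derive(Default)]
struct ExplainConfigBuilder {
    config: ExplainConfig,
}

impl ExplainConfigBuilder {
    fn add_option(mut self, option: &ExplainOption) -> Self {
        match option {
            ExplainOption::Verbose(v) => self.config.verbose = *v,
            ExplainOption::Logical(v) => self.config.logical = *v,
            ExplainOption::Optimized(v) => {
                // The optimized plan is still rendered as a logical plan,
                // so OPTIMIZED turns LOGICAL on as well.
                if *v {
                    self.config.logical = true;
                }
                self.config.optimized = *v;
            }
        }
        self
    }

    fn build(self) -> ExplainConfig {
        self.config
    }
}

fn main() {
    // EXPLAIN(OPTIMIZED) <query>: logical and optimized are both enabled.
    let optimized = ExplainConfigBuilder::default()
        .add_option(&ExplainOption::Optimized(true))
        .build();
    assert!(optimized.logical && optimized.optimized && !optimized.verbose);

    // Legacy `EXPLAIN RAW <query>` is rewritten by the binder as `EXPLAIN(LOGICAL) <query>`.
    let raw = ExplainConfigBuilder::default()
        .add_option(&ExplainOption::Logical(true))
        .build();
    assert!(raw.logical && !raw.optimized);

    println!("optimized => {:?}", optimized);
    println!("raw       => {:?}", raw);
}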