chore: Fix warnings in both compiler and test environments #164
```diff
@@ -34,7 +34,7 @@ use jni::{
 };
 use log::{info, LevelFilter};
 use log4rs::{
-    append::console::ConsoleAppender,
+    append::console::{ConsoleAppender, Target},
     config::{load_config_file, Appender, Deserializers, Root},
    encode::pattern::PatternEncoder,
    Config,
```
```diff
@@ -99,6 +99,7 @@ const LOG_PATTERN: &str = "{d(%y/%m/%d %H:%M:%S)} {l} {f}: {m}{n}";
 // Creates a default log4rs config, which logs to console with `INFO` level.
 fn default_logger_config() -> CometResult<Config> {
     let console_append = ConsoleAppender::builder()
+        .target(Target::Stderr)
         .encoder(Box::new(PatternEncoder::new(LOG_PATTERN)))
         .build();
     let appender = Appender::builder().build("console", Box::new(console_append));
```

Review comment: Logging to stderr, so that surefire will not complain about writes to stdout.
```diff
@@ -711,9 +711,9 @@ under the License.
         <artifactId>maven-surefire-plugin</artifactId>
         <version>3.1.0</version>
         <configuration>
-          <systemProperties>
+          <systemPropertyVariables>
             <log4j.configurationFile>file:src/test/resources/log4j2.properties</log4j.configurationFile>
-          </systemProperties>
+          </systemPropertyVariables>
           <argLine>-ea -Xmx4g -Xss4m ${extraJavaTestArgs}</argLine>
         </configuration>
       </plugin>
```

Review comment: `<systemProperties>` is deprecated in the Surefire plugin; `<systemPropertyVariables>` is its replacement.
```diff
@@ -115,7 +115,7 @@ class CometSparkSessionExtensions
       // data source V1
       case scanExec @ FileSourceScanExec(
             HadoopFsRelation(_, partitionSchema, _, _, _: ParquetFileFormat, _),
-            _: Seq[AttributeReference],
+            _: Seq[_],
             requiredSchema,
             _,
             _,
```

Review comment: Type erasure. The element type of a `Seq` is erased at runtime, so the type pattern `Seq[AttributeReference]` is unchecked (it matches any `Seq`) and the compiler warns about it; `Seq[_]` states that explicitly.
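For illustration, a minimal self-contained sketch of the warning and the runtime behavior behind it (the object and method names here are hypothetical, not from the PR):

```scala
object ErasureDemo extends App {
  def describe(x: Any): String = x match {
    // Compiler warning: "non-variable type argument String in type pattern
    // Seq[String] is unchecked since it is eliminated by erasure".
    // Worse, at runtime this case matches ANY Seq, whatever its elements.
    case _: Seq[String] => "looks like Seq[String]"
    case _              => "something else"
  }

  println(describe(Seq("a", "b"))) // looks like Seq[String]
  println(describe(Seq(1, 2)))     // looks like Seq[String] -- erasure in action
}
```

Matching on `Seq[_]` keeps the identical runtime behavior while stating it explicitly, which is why it silences the warning.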
```diff
@@ -1472,14 +1472,14 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
     // With Spark 3.4, CharVarcharCodegenUtils.readSidePadding gets called to pad spaces for char
     // types. Use rpad to achieve the behavior. See https://github.com/apache/spark/pull/38151
     case StaticInvoke(
-          _: Class[CharVarcharCodegenUtils],
+          clz: Class[_],
           _: StringType,
           "readSidePadding",
           arguments,
           _,
           true,
           false,
-          true) if arguments.size == 2 =>
+          true) if clz == classOf[CharVarcharCodegenUtils] && arguments.size == 2 =>
       val argsExpr = Seq(
         exprToProtoInternal(Cast(arguments(0), StringType), inputs),
         exprToProtoInternal(arguments(1), inputs))
```

Review comment: Type erasure again. `Class[CharVarcharCodegenUtils]` cannot be verified as a type pattern at runtime, so bind `clz: Class[_]` and check it against `classOf[CharVarcharCodegenUtils]` in the guard instead.
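The same erasure limitation applies to `Class[...]` type patterns; binding the value and comparing with `classOf` is the checked alternative. A reduced, hypothetical sketch (`StaticCall` and `handle` are illustrative names, not Spark APIs):

```scala
object ClassGuardDemo extends App {
  final case class StaticCall(clazz: Class[_], name: String)

  def handle(call: StaticCall): String = call match {
    // `case StaticCall(_: Class[String], ...)` would be unchecked (erasure);
    // binding `clz` and comparing against classOf is a real runtime check.
    case StaticCall(clz, "valueOf") if clz == classOf[java.lang.String] =>
      "String.valueOf"
    case _ => "unsupported"
  }

  println(handle(StaticCall(classOf[java.lang.String], "valueOf"))) // String.valueOf
  println(handle(StaticCall(classOf[Integer], "valueOf")))          // unsupported
}
```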
```diff
@@ -19,8 +19,6 @@

 package org.apache.spark.sql.comet

-import java.util.Objects
-
 import org.apache.spark.rdd.RDD
 import org.apache.spark.serializer.Serializer
 import org.apache.spark.sql.catalyst.InternalRow

@@ -29,6 +27,8 @@ import org.apache.spark.sql.execution.{ColumnarToRowExec, SparkPlan, UnaryExecNo
 import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics, SQLShuffleReadMetricsReporter, SQLShuffleWriteMetricsReporter}
 import org.apache.spark.sql.vectorized.ColumnarBatch

+import com.google.common.base.Objects
+
 /**
  * Comet physical plan node for Spark `CollectLimitExec`.
  *
```

Review comment: It was wrong to use `java.util.Objects` here; the class calls Guava's `com.google.common.base.Objects`.
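Why the import matters, in a small hypothetical sketch (assumes Guava on the classpath; `Limits` is an illustrative class, not from the PR): Guava's `Objects.hashCode` is varargs, while `java.util.Objects.hashCode` takes a single argument (the JDK varargs form is spelled `Objects.hash`), so the wrong import either fails to compile or quietly changes meaning.

```scala
import com.google.common.base.Objects

final class Limits(val limit: Int, val offset: Int) {
  // Guava: varargs hashCode over all fields (the Ints are boxed to Integer).
  // With `import java.util.Objects` this call would not compile; the JDK
  // equivalent is `java.util.Objects.hash(limit, offset)`.
  override def hashCode(): Int = Objects.hashCode(limit, offset)

  override def equals(other: Any): Boolean = other match {
    case that: Limits => limit == that.limit && offset == that.offset
    case _            => false
  }
}

object HashDemo extends App {
  println(new Limits(10, 0).hashCode())
}
```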
```diff
@@ -271,7 +271,7 @@ class CometTPCHQuerySuite extends QueryTest with CometTPCBase with SQLQueryTestH
   }

   // TODO: remove once Spark 3.2 & 3.3 is no longer supported
-  private val shouldRegenerateGoldenFiles: Boolean =
+  private def shouldRegenerateGoldenFiles: Boolean =
     System.getenv("SPARK_GENERATE_GOLDEN_FILES") == "1"
 }
```

Review comment: `shouldRegenerateGoldenFiles` is forward-referenced at L227; with a `val`, that reference runs before the field is initialized at runtime, so it is made a `def`.
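A minimal sketch of the forward-reference hazard (hypothetical class names and a hypothetical `MY_FLAG` environment variable): `val`s in a class body are initialized top to bottom, so a `val` read before its declaration still holds the type's default value, while a `def` is evaluated at each call.

```scala
object ForwardRefDemo extends App {
  class WithVal {
    val early: String = s"flag=$flag" // forward reference: flag not yet assigned
    val flag: Boolean = sys.env.get("MY_FLAG").contains("1")
  }
  class WithDef {
    val early: String = s"flag=$flag" // def is evaluated here, on demand
    def flag: Boolean = sys.env.get("MY_FLAG").contains("1")
  }

  // With MY_FLAG=1 in the environment:
  println(new WithVal().early) // flag=false -- the Boolean default, not the env
  println(new WithDef().early) // flag=true  -- correct
}
```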
Review comment: `FileSystem.getAllStatistics` is deprecated.
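A hedged sketch of the documented replacement, not code from the PR (assumes Hadoop 2.8+ on the classpath and Scala 2.13 for `CollectionConverters`): Hadoop deprecated `FileSystem.getAllStatistics` in favor of the global storage-statistics registry.

```scala
import scala.jdk.CollectionConverters._
import org.apache.hadoop.fs.FileSystem

object FsStatsDemo extends App {
  // Per-scheme StorageStatistics instead of the deprecated
  // List<FileSystem.Statistics> returned by getAllStatistics.
  val global = FileSystem.getGlobalStorageStatistics
  for (stats <- global.iterator().asScala) {
    println(s"scheme=${stats.getName}")
    stats.getLongStatistics.asScala.foreach { s =>
      println(s"  ${s.getName} = ${s.getValue}")
    }
  }
}
```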