Skip to content

Commit

Permalink
chore: Fix warnings in both compiler and test environments (apache#164)
Browse files — browse the repository at this point in the history
  • Loading branch information
advancedxy authored Mar 4, 2024
1 parent 0ad78cd commit 72398a6
Show file tree
Hide file tree
Showing 8 changed files with 11 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -517,6 +517,7 @@ public void close() throws IOException {
}
}

@SuppressWarnings("deprecation")
private boolean loadNextRowGroupIfNecessary() throws Throwable {
// More rows can be read from loaded row group. No need to load next one.
if (rowsRead != totalRowsLoaded) return true;
Expand Down
3 changes: 2 additions & 1 deletion core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ use jni::{
};
use log::{info, LevelFilter};
use log4rs::{
append::console::ConsoleAppender,
append::console::{ConsoleAppender, Target},
config::{load_config_file, Appender, Deserializers, Root},
encode::pattern::PatternEncoder,
Config,
Expand Down Expand Up @@ -99,6 +99,7 @@ const LOG_PATTERN: &str = "{d(%y/%m/%d %H:%M:%S)} {l} {f}: {m}{n}";
// Creates a default log4rs config, which logs to console with `INFO` level.
fn default_logger_config() -> CometResult<Config> {
let console_append = ConsoleAppender::builder()
.target(Target::Stderr)
.encoder(Box::new(PatternEncoder::new(LOG_PATTERN)))
.build();
let appender = Appender::builder().build("console", Box::new(console_append));
Expand Down
4 changes: 2 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -711,9 +711,9 @@ under the License.
<artifactId>maven-surefire-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<systemProperties>
<systemPropertyVariables>
<log4j.configurationFile>file:src/test/resources/log4j2.properties</log4j.configurationFile>
</systemProperties>
</systemPropertyVariables>
<argLine>-ea -Xmx4g -Xss4m ${extraJavaTestArgs}</argLine>
</configuration>
</plugin>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ class CometSparkSessionExtensions
// data source V1
case scanExec @ FileSourceScanExec(
HadoopFsRelation(_, partitionSchema, _, _, _: ParquetFileFormat, _),
_: Seq[AttributeReference],
_: Seq[_],
requiredSchema,
_,
_,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1472,14 +1472,14 @@ object QueryPlanSerde extends Logging with ShimQueryPlanSerde {
// With Spark 3.4, CharVarcharCodegenUtils.readSidePadding gets called to pad spaces for char
// types. Use rpad to achieve the behavior. See https://github.com/apache/spark/pull/38151
case StaticInvoke(
_: Class[CharVarcharCodegenUtils],
clz: Class[_],
_: StringType,
"readSidePadding",
arguments,
_,
true,
false,
true) if arguments.size == 2 =>
true) if clz == classOf[CharVarcharCodegenUtils] && arguments.size == 2 =>
val argsExpr = Seq(
exprToProtoInternal(Cast(arguments(0), StringType), inputs),
exprToProtoInternal(arguments(1), inputs))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@

package org.apache.spark.sql.comet

import java.util.Objects

import org.apache.spark.rdd.RDD
import org.apache.spark.serializer.Serializer
import org.apache.spark.sql.catalyst.InternalRow
Expand All @@ -29,6 +27,8 @@ import org.apache.spark.sql.execution.{ColumnarToRowExec, SparkPlan, UnaryExecNo
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics, SQLShuffleReadMetricsReporter, SQLShuffleWriteMetricsReporter}
import org.apache.spark.sql.vectorized.ColumnarBatch

import com.google.common.base.Objects

/**
* Comet physical plan node for Spark `CollectLimitExec`.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,6 @@ class CometExecSuite extends CometTestBase {
val exchanges = stripAQEPlan(df.queryExecution.executedPlan).collect {
case s: CometShuffleExchangeExec if s.shuffleType == CometColumnarShuffle =>
s
s
}
assert(exchanges.length == 4)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ class CometTPCHQuerySuite extends QueryTest with CometTPCBase with SQLQueryTestH
}

// TODO: remove once Spark 3.2 & 3.3 is no longer supported
private val shouldRegenerateGoldenFiles: Boolean =
private def shouldRegenerateGoldenFiles: Boolean =
System.getenv("SPARK_GENERATE_GOLDEN_FILES") == "1"
}

Expand Down

0 comments on commit 72398a6

Please sign in to comment.