Commit
remove shuffle.mode config
andygrove committed Jan 1, 2025
1 parent 19ebbc8 commit e06745b
Showing 12 changed files with 84 additions and 95 deletions.
12 changes: 0 additions & 12 deletions common/src/main/scala/org/apache/comet/CometConf.scala
@@ -19,7 +19,6 @@

package org.apache.comet

import java.util.Locale
import java.util.concurrent.TimeUnit

import scala.collection.mutable.ListBuffer
@@ -254,17 +253,6 @@ object CometConf extends ShimCometConf {
.booleanConf
.createWithDefault(true)

val COMET_SHUFFLE_MODE: ConfigEntry[String] =
conf(s"$COMET_EXEC_CONFIG_PREFIX.shuffle.mode.deprecated")
.doc(
s"Legacy configuration only used in tests. Use ${COMET_NATIVE_SHUFFLE_ENABLED.key} and " +
s"${COMET_COLUMNAR_SHUFFLE_ENABLED.key} instead")
.internal()
.stringConf
.transform(_.toLowerCase(Locale.ROOT))
.checkValues(Set("native", "jvm", "auto"))
.createWithDefault("auto")

val COMET_EXEC_BROADCAST_FORCE_ENABLED: ConfigEntry[Boolean] =
conf(s"$COMET_EXEC_CONFIG_PREFIX.broadcast.enabled")
.doc(
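With the legacy shuffle.mode entry removed, the shuffle implementation is selected solely by the two boolean flags named in the deleted doc string. A minimal usage sketch follows (Scala, assuming a running SparkSession named `spark`; only config entries that appear in this diff are referenced, everything else is illustrative):

import org.apache.comet.CometConf

// Enable Comet shuffle, then choose the implementation with the two flags.
spark.conf.set(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key, "true")

// Prefer the native shuffle:
spark.conf.set(CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key, "true")
spark.conf.set(CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key, "false")

// Or prefer the JVM (columnar) shuffle instead:
spark.conf.set(CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key, "false")
spark.conf.set(CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key, "true")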
@@ -1214,19 +1214,11 @@ object CometSparkSessionExtensions extends Logging {
}

private[comet] def isCometNativeShuffleMode(conf: SQLConf): Boolean = {
COMET_SHUFFLE_MODE.get(conf) match {
case "native" => true
case "auto" => CometConf.COMET_NATIVE_SHUFFLE_ENABLED.get(conf)
case _ => false
}
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.get(conf)
}

private[comet] def isCometJVMShuffleMode(conf: SQLConf): Boolean = {
COMET_SHUFFLE_MODE.get(conf) match {
case "jvm" => true
case "auto" => CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.get(conf)
case _ => false
}
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.get(conf)
}

def isCometScan(op: SparkPlan): Boolean = {
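After this change each helper reads exactly one flag instead of interpreting the removed mode string. A hypothetical sketch of combining the two checks (both helpers are private[comet], so this sketch assumes it lives in the org.apache.comet package; the object and method names are illustrative only, not part of this commit):

package org.apache.comet

import org.apache.spark.sql.internal.SQLConf
import org.apache.comet.CometSparkSessionExtensions.{isCometJVMShuffleMode, isCometNativeShuffleMode}

object ShuffleModeSketch {
  // Illustrative only: report which shuffle implementation the flags would allow.
  def chooseShuffleImpl(conf: SQLConf): String =
    if (isCometNativeShuffleMode(conf)) "native"
    else if (isCometJVMShuffleMode(conf)) "jvm"
    else "spark"
}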
@@ -59,7 +59,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
val df1 = sql("SELECT count(DISTINCT 2), count(DISTINCT 2,3)")
checkSparkAnswer(df1)

@@ -72,7 +72,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
checkSparkAnswer(sql("""
|SELECT
| lag(123, 100, 321) OVER (ORDER BY id) as lag,
@@ -93,7 +93,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
val df1 = Seq(
("a", "b", "c"),
("a", "b", "c"),
@@ -114,7 +114,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
val df = sql("SELECT LAST(n) FROM lowerCaseData")
checkSparkAnswer(df)
}
@@ -129,7 +129,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
val df = sql("select sum(a), avg(a) from allNulls")
checkSparkAnswer(df)
}
@@ -140,7 +140,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test")
makeParquetFile(path, 10000, 10, false)
@@ -156,7 +156,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test")
@@ -175,7 +175,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {

sql(
"CREATE TABLE lineitem(l_extendedprice DOUBLE, l_quantity DOUBLE, l_partkey STRING) USING PARQUET")
@@ -212,7 +212,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> EliminateSorts.ruleName,
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test")
@@ -231,7 +231,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "false",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
withTable(table) {
sql(s"CREATE TABLE $table(col DECIMAL(5, 2)) USING PARQUET")
sql(s"INSERT INTO TABLE $table VALUES (CAST(12345.01 AS DECIMAL(5, 2)))")
@@ -331,7 +331,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
Seq(true, false).foreach { dictionaryEnabled =>
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> nativeShuffleEnabled.toString,
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
withParquetTable(
(0 until 100).map(i => (i, (i % 10).toString)),
"tbl",
@@ -514,7 +514,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
Seq(true, false).foreach { nativeShuffleEnabled =>
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> nativeShuffleEnabled.toString,
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test")
makeParquetFile(path, 1000, 20, dictionaryEnabled)
@@ -716,7 +716,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("test final count") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
Seq(false, true).foreach { dictionaryEnabled =>
withParquetTable((0 until 5).map(i => (i, i % 2)), "tbl", dictionaryEnabled) {
checkSparkAnswerAndNumOfAggregates("SELECT _2, COUNT(_1) FROM tbl GROUP BY _2", 2)
@@ -733,7 +733,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("test final min/max") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionaryEnabled =>
withParquetTable((0 until 5).map(i => (i, i % 2)), "tbl", dictionaryEnabled) {
checkSparkAnswerAndNumOfAggregates(
@@ -754,7 +754,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("test final min/max/count with result expressions") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionaryEnabled =>
withParquetTable((0 until 5).map(i => (i, i % 2)), "tbl", dictionaryEnabled) {
checkSparkAnswerAndNumOfAggregates(
@@ -789,7 +789,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("test final sum") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
Seq(false, true).foreach { dictionaryEnabled =>
withParquetTable((0L until 5L).map(i => (i, i % 2)), "tbl", dictionaryEnabled) {
checkSparkAnswerAndNumOfAggregates(
@@ -810,7 +810,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("test final avg") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionaryEnabled =>
withParquetTable(
(0 until 5).map(i => (i.toDouble, i.toDouble % 2)),
@@ -835,7 +835,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {

withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionaryEnabled =>
withSQLConf("parquet.enable.dictionary" -> dictionaryEnabled.toString) {
val table = "t1"
@@ -882,7 +882,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("avg null handling") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "native") {
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false") {
val table = "t1"
withTable(table) {
sql(s"create table $table(a double, b double) using parquet")
@@ -904,7 +904,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
Seq(true, false).foreach { nativeShuffleEnabled =>
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> nativeShuffleEnabled.toString,
CometConf.COMET_SHUFFLE_MODE.key -> "native",
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> "false",
CometConf.COMET_CAST_ALLOW_INCOMPATIBLE.key -> "true") {
withTempDir { dir =>
val path = new Path(dir.toURI.toString, "test")
@@ -944,11 +944,13 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {

test("distinct") {
withSQLConf(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true") {
Seq("native", "jvm").foreach { cometShuffleMode =>
withSQLConf(CometConf.COMET_SHUFFLE_MODE.key -> cometShuffleMode) {
Seq(true, false).foreach { nativeShuffle =>
withSQLConf(
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> nativeShuffle.toString,
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> (!nativeShuffle).toString) {
Seq(true, false).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
val cometColumnShuffleEnabled = cometShuffleMode == "jvm"
val cometColumnShuffleEnabled = !nativeShuffle
val table = "test"
withTable(table) {
sql(s"create table $table(col1 int, col2 int, col3 int) using parquet")
@@ -1001,7 +1003,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
SQLConf.COALESCE_PARTITIONS_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
val table = "test"
@@ -1047,8 +1049,10 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {

test("test bool_and/bool_or") {
withSQLConf(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true") {
Seq("native", "jvm").foreach { cometShuffleMode =>
withSQLConf(CometConf.COMET_SHUFFLE_MODE.key -> cometShuffleMode) {
Seq(true, false).foreach { nativeShuffle =>
withSQLConf(
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> nativeShuffle.toString,
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> (!nativeShuffle).toString) {
Seq(true, false).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
val table = "test"
@@ -1073,7 +1077,7 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
test("bitwise aggregate") {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
Seq(true, false).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
val table = "test"
@@ -1149,8 +1153,10 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {

test("covariance & correlation") {
withSQLConf(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true") {
Seq("jvm", "native").foreach { cometShuffleMode =>
withSQLConf(CometConf.COMET_SHUFFLE_MODE.key -> cometShuffleMode) {
Seq(true, false).foreach { nativeShuffle =>
withSQLConf(
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> nativeShuffle.toString,
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> (!nativeShuffle).toString) {
Seq(true, false).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
Seq(true, false).foreach { nullOnDivideByZero =>
@@ -1221,8 +1227,10 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {

test("var_pop and var_samp") {
withSQLConf(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true") {
Seq("native", "jvm").foreach { cometShuffleMode =>
withSQLConf(CometConf.COMET_SHUFFLE_MODE.key -> cometShuffleMode) {
Seq(true, false).foreach { nativeShuffle =>
withSQLConf(
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> nativeShuffle.toString,
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> (!nativeShuffle).toString) {
Seq(true, false).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
Seq(true, false).foreach { nullOnDivideByZero =>
@@ -1262,8 +1270,10 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
withSQLConf(
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_EXPR_STDDEV_ENABLED.key -> "true") {
Seq("native", "jvm").foreach { cometShuffleMode =>
withSQLConf(CometConf.COMET_SHUFFLE_MODE.key -> cometShuffleMode) {
Seq(true, false).foreach { nativeShuffle =>
withSQLConf(
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> nativeShuffle.toString,
CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> (!nativeShuffle).toString) {
Seq(true, false).foreach { dictionary =>
withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
Seq(true, false).foreach { nullOnDivideByZero =>
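Throughout these test suites the recurring change is the same: iteration over the string modes ("native", "jvm") is replaced by iteration over a Boolean that sets the two flags in opposition. The pattern in isolation (a sketch of the construct used above, relying on the usual withSQLConf test helper):

Seq(true, false).foreach { nativeShuffle =>
  withSQLConf(
    CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> nativeShuffle.toString,
    CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> (!nativeShuffle).toString) {
    // run the query under both shuffle implementations
  }
}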
@@ -57,7 +57,7 @@ abstract class CometColumnarShuffleSuite extends CometTestBase with AdaptiveSparkPlanHelper {
CometConf.COMET_COLUMNAR_SHUFFLE_ASYNC_ENABLED.key -> asyncShuffleEnable.toString,
CometConf.COMET_COLUMNAR_SHUFFLE_SPILL_THRESHOLD.key -> numElementsForceSpillThreshold.toString,
CometConf.COMET_EXEC_ENABLED.key -> "false",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm",
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false",
CometConf.COMET_COLUMNAR_SHUFFLE_UNIFIED_MEMORY_ALLOCATOR_IN_TEST.key ->
useUnifiedMemoryAllocator.toString,
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
@@ -1001,7 +1001,7 @@ class CometShuffleSuite extends CometColumnarShuffleSuite {
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
CometConf.COMET_EXEC_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
withParquetTable((0 until 10).map(i => (i, i % 5)), "tbl_a") {
val df = sql("SELECT * FROM tbl_a")
val shuffled = df
@@ -1021,7 +1021,7 @@ class CometShuffleSuite extends CometColumnarShuffleSuite {
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
CometConf.COMET_EXEC_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
withParquetTable((0 until 10).map(i => (i, i % 5)), "tbl_a") {
withParquetTable((0 until 10).map(i => (i % 10, i + 2)), "tbl_b") {
val df = sql("SELECT * FROM tbl_a")
@@ -1054,7 +1054,7 @@ class DisableAQECometShuffleSuite extends CometColumnarShuffleSuite {
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
CometConf.COMET_EXEC_ENABLED.key -> "true",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false") {
withParquetTable((0 until 10).map(i => (i, i % 5)), "tbl_a") {
withParquetTable((0 until 10).map(i => (i % 10, i + 2)), "tbl_b") {
val df = sql("SELECT * FROM tbl_a")
@@ -1099,7 +1099,7 @@ class CometShuffleEncryptionSuite extends CometTestBase {
withSQLConf(
CometConf.COMET_EXEC_ENABLED.key -> "false",
CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm",
CometConf.COMET_NATIVE_SHUFFLE_ENABLED.key -> "false",
CometConf.COMET_COLUMNAR_SHUFFLE_ASYNC_ENABLED.key -> asyncEnabled.toString) {
readParquetFile(path.toString) { df =>
val shuffled = df