diff --git a/common/src/main/java/org/apache/comet/parquet/ReadOptions.java b/common/src/main/java/org/apache/comet/parquet/ReadOptions.java
index a25fc61a7..023f71dc7 100644
--- a/common/src/main/java/org/apache/comet/parquet/ReadOptions.java
+++ b/common/src/main/java/org/apache/comet/parquet/ReadOptions.java
@@ -53,10 +53,10 @@ public class ReadOptions {
   // to reduce the skew. This will result in a slightly larger number of connections
   // opened to the file system but may give improved performance.
   // The option is off by default.
-  public static final String BOSON_IO_ADJUST_READRANGE_SKEW =
-      "boson.parquet.read.io.adjust.readRange.skew";
+  public static final String COMET_IO_ADJUST_READRANGE_SKEW =
+      "comet.parquet.read.io.adjust.readRange.skew";
 
-  private static final boolean BOSON_IO_ADJUST_READRANGE_SKEW_DEFAULT = false;
+  private static final boolean COMET_IO_ADJUST_READRANGE_SKEW_DEFAULT = false;
 
   // Max number of concurrent tasks we expect. Used to autoconfigure S3 client connections
   public static final int S3A_MAX_EXPECTED_PARALLELISM = 32;
@@ -180,7 +180,7 @@ public Builder(Configuration conf) {
       this.ioMergeRangesDelta =
           conf.getInt(COMET_IO_MERGE_RANGES_DELTA, COMET_IO_MERGE_RANGES_DELTA_DEFAULT);
       this.adjustReadRangeSkew =
-          conf.getBoolean(BOSON_IO_ADJUST_READRANGE_SKEW, BOSON_IO_ADJUST_READRANGE_SKEW_DEFAULT);
+          conf.getBoolean(COMET_IO_ADJUST_READRANGE_SKEW, COMET_IO_ADJUST_READRANGE_SKEW_DEFAULT);
       // override some S3 defaults
       setS3Config();
     }
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
index 8a68a925e..1dac14d02 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala
@@ -923,9 +923,9 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   test("test bool_and/bool_or") {
     withSQLConf(CometConf.COMET_EXEC_SHUFFLE_ENABLED.key -> "true") {
-      Seq(true, false).foreach { bosonColumnShuffleEnabled =>
+      Seq(true, false).foreach { cometColumnShuffleEnabled =>
         withSQLConf(
-          CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> bosonColumnShuffleEnabled.toString) {
+          CometConf.COMET_COLUMNAR_SHUFFLE_ENABLED.key -> cometColumnShuffleEnabled.toString) {
           Seq(true, false).foreach { dictionary =>
             withSQLConf("parquet.enable.dictionary" -> dictionary.toString) {
               val table = "test"
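
Not part of the diff: a minimal sketch of how the renamed `comet.parquet.read.io.adjust.readRange.skew` key could be consumed, based only on what the first hunk shows (the public constant and the `Builder(Configuration)` constructor that reads it via `conf.getBoolean`). The example class name and the assumption that the builder exposes a `build()` method are hypothetical.

```java
import org.apache.comet.parquet.ReadOptions;
import org.apache.hadoop.conf.Configuration;

// Hypothetical example class; only the renamed constant and the
// Builder(Configuration) constructor are confirmed by the diff above.
public class ReadRangeSkewExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // The option is off by default; enable it with the renamed comet.* key.
    conf.setBoolean(ReadOptions.COMET_IO_ADJUST_READRANGE_SKEW, true);
    // Assumes Builder exposes a build() method returning ReadOptions.
    ReadOptions options = new ReadOptions.Builder(conf).build();
  }
}
```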