
modify tests
feiniaofeiafei committed May 8, 2024
1 parent c1bc4a4 commit d87c9f6
Showing 108 changed files with 320 additions and 80 deletions.
@@ -88,7 +88,7 @@
*/
public class CreateMTMVInfo {
public static final Logger LOG = LogManager.getLogger(CreateMTMVInfo.class);
public static final String MTMV_PLANER_DISABLE_RULES = "OLAP_SCAN_PARTITION_PRUNE";
public static final String MTMV_PLANER_DISABLE_RULES = "OLAP_SCAN_PARTITION_PRUNE,PRUNE_EMPTY_PARTITION";
private final boolean ifNotExists;
private final TableNameInfo mvName;
private List<String> keys;
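
For context, MTMV_PLANER_DISABLE_RULES is a comma-separated list of Nereids rule names that appears to be applied when planning a materialized view's definition. The same rules can also be switched off for a single session, which is the pattern the test changes later in this commit rely on; a minimal sketch, assuming connectContext is an already-created session context:

// disable the same rules for one session instead of for MTMV planning
connectContext.getSessionVariable()
        .setDisableNereidsRules("OLAP_SCAN_PARTITION_PRUNE,PRUNE_EMPTY_PARTITION");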
@@ -62,18 +62,20 @@ public abstract class AbstractInsertExecutor {

protected String errMsg = "";
protected Optional<InsertCommandContext> insertCtx;
protected final boolean emptyInsert;

/**
* Constructor
*/
public AbstractInsertExecutor(ConnectContext ctx, TableIf table, String labelName, NereidsPlanner planner,
Optional<InsertCommandContext> insertCtx) {
Optional<InsertCommandContext> insertCtx, boolean emptyInsert) {
this.ctx = ctx;
this.coordinator = EnvFactory.getInstance().createCoordinator(ctx, null, planner, ctx.getStatsErrorEstimator());
this.labelName = labelName;
this.table = table;
this.database = table.getDatabase();
this.insertCtx = insertCtx;
this.emptyInsert = emptyInsert;
}

public Coordinator getCoordinator() {
@@ -213,4 +215,8 @@ public void executeSingleInsert(StmtExecutor executor, long jobId) throws Except
}
afterExec(executor);
}

public boolean isEmptyInsert() {
return emptyInsert;
}
}
@@ -64,8 +64,8 @@ public class HiveInsertExecutor extends AbstractInsertExecutor {
*/
public HiveInsertExecutor(ConnectContext ctx, HMSExternalTable table,
String labelName, NereidsPlanner planner,
Optional<InsertCommandContext> insertCtx) {
super(ctx, table, labelName, planner, insertCtx);
Optional<InsertCommandContext> insertCtx, boolean emptyInsert) {
super(ctx, table, labelName, planner, insertCtx, emptyInsert);
catalogName = table.getCatalog().getName();
transactionManager = table.getCatalog().getTransactionManager();

@@ -30,6 +30,7 @@
import org.apache.doris.nereids.analyzer.UnboundTableSink;
import org.apache.doris.nereids.exceptions.AnalysisException;
import org.apache.doris.nereids.glue.LogicalPlanAdapter;
import org.apache.doris.nereids.trees.TreeNode;
import org.apache.doris.nereids.trees.plans.Explainable;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.trees.plans.PlanType;
@@ -163,25 +164,21 @@ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor executor
// return;
throw new AnalysisException("group commit is not supported in Nereids now");
}
if (physicalSink.child(0) instanceof PhysicalEmptyRelation) {
return null;
}
boolean emptyInsert = leafIsEmptyRelation(physicalSink);
OlapTable olapTable = (OlapTable) targetTableIf;
// the insertCtx contains some variables to adjust SinkNode
insertExecutor = new OlapInsertExecutor(ctx, olapTable, label, planner, insertCtx);
insertExecutor = new OlapInsertExecutor(ctx, olapTable, label, planner, insertCtx, emptyInsert);
boolean isEnableMemtableOnSinkNode =
olapTable.getTableProperty().getUseSchemaLightChange()
? insertExecutor.getCoordinator().getQueryOptions().isEnableMemtableOnSinkNode()
: false;
insertExecutor.getCoordinator().getQueryOptions()
.setEnableMemtableOnSinkNode(isEnableMemtableOnSinkNode);
} else if (physicalSink instanceof PhysicalHiveTableSink) {
if (physicalSink.child(0) instanceof PhysicalEmptyRelation) {
return null;
}
boolean emptyInsert = leafIsEmptyRelation(physicalSink);
HMSExternalTable hiveExternalTable = (HMSExternalTable) targetTableIf;
insertExecutor = new HiveInsertExecutor(ctx, hiveExternalTable, label, planner,
Optional.of(insertCtx.orElse((new HiveInsertCommandContext()))));
Optional.of(insertCtx.orElse((new HiveInsertCommandContext()))), emptyInsert);
// set hive query options
} else {
// TODO: support other table types
@@ -209,7 +206,8 @@ public AbstractInsertExecutor initPlan(ConnectContext ctx, StmtExecutor executor

private void runInternal(ConnectContext ctx, StmtExecutor executor) throws Exception {
AbstractInsertExecutor insertExecutor = initPlan(ctx, executor);
if (insertExecutor == null) {
// if the insert stmt data source is empty, directly return, no need to be executed.
if (insertExecutor.isEmptyInsert()) {
return;
}
insertExecutor.executeSingleInsert(executor, jobId);
@@ -228,4 +226,16 @@ public Plan getExplainPlan(ConnectContext ctx) {
public <R, C> R accept(PlanVisitor<R, C> visitor, C context) {
return visitor.visitInsertIntoTableCommand(this, context);
}

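/**
 * Returns true only when every leaf of the given plan tree is a PhysicalEmptyRelation,
 * i.e. the insert has no source rows at all.
 */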
private boolean leafIsEmptyRelation(TreeNode<Plan> node) {
if (node.children() == null || node.children().isEmpty()) {
return node instanceof PhysicalEmptyRelation;
}
for (TreeNode<Plan> child : node.children()) {
if (!leafIsEmptyRelation(child)) {
return false;
}
}
return true;
}
}
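
For illustration only, here is a minimal standalone sketch of the same leaf-only check, assuming a simplified Node interface in place of Doris's TreeNode<Plan> and stand-in leaf classes in place of PhysicalEmptyRelation and the scan operators:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

interface Node {
    List<Node> children();
}

// leaf that produces no rows, standing in for PhysicalEmptyRelation
final class EmptyRelation implements Node {
    public List<Node> children() {
        return Collections.emptyList();
    }
}

// leaf that produces rows, standing in for a scan
final class ScanRelation implements Node {
    public List<Node> children() {
        return Collections.emptyList();
    }
}

// inner node such as a union or a sink, standing in for any plan operator
final class InnerNode implements Node {
    private final List<Node> children;
    InnerNode(Node... children) {
        this.children = Arrays.asList(children);
    }
    public List<Node> children() {
        return children;
    }
}

public final class LeafCheckSketch {
    // true only when every leaf of the tree is an EmptyRelation
    static boolean leafIsEmptyRelation(Node node) {
        if (node.children() == null || node.children().isEmpty()) {
            return node instanceof EmptyRelation;
        }
        for (Node child : node.children()) {
            if (!leafIsEmptyRelation(child)) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        // all leaves empty: the insert would be skipped
        System.out.println(leafIsEmptyRelation(new InnerNode(new EmptyRelation(), new EmptyRelation()))); // true
        // one leaf still produces rows: the insert must run
        System.out.println(leafIsEmptyRelation(new InnerNode(new EmptyRelation(), new ScanRelation()))); // false
    }
}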
@@ -80,8 +80,8 @@ public class OlapInsertExecutor extends AbstractInsertExecutor {
* constructor
*/
public OlapInsertExecutor(ConnectContext ctx, Table table,
String labelName, NereidsPlanner planner, Optional<InsertCommandContext> insertCtx) {
super(ctx, table, labelName, planner, insertCtx);
String labelName, NereidsPlanner planner, Optional<InsertCommandContext> insertCtx, boolean emptyInsert) {
super(ctx, table, labelName, planner, insertCtx, emptyInsert);
}

public long getTxnId() {
@@ -27,6 +27,7 @@
import org.apache.doris.nereids.trees.plans.logical.LogicalCatalogRelation;
import org.apache.doris.nereids.trees.plans.physical.PhysicalCatalogRelation;
import org.apache.doris.nereids.trees.plans.visitor.TableCollector.TableCollectorContext;
import org.apache.doris.qe.ConnectContext;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -74,7 +75,9 @@ private void expandMvAndCollect(MTMV mtmv, TableCollectorContext context) {
return;
}
try {
MTMVCache expandedMv = MTMVCache.from(mtmv, MTMVPlanUtil.createMTMVContext(mtmv));
ConnectContext mtmvContext = MTMVPlanUtil.createMTMVContext(mtmv);
mtmvContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
MTMVCache expandedMv = MTMVCache.from(mtmv, mtmvContext);
expandedMv.getLogicalPlan().accept(this, context);
} catch (AnalysisException e) {
LOG.error(String.format(
@@ -525,7 +525,7 @@ public void testImplicitConvertSupport() throws Exception {

@Test
public void testDeleteSign() throws Exception {
String sql1 = "SELECT /*+ SET_VAR(enable_nereids_planner=true, ENABLE_FALLBACK_TO_ORIGINAL_PLANNER=false) */ * FROM db1.table1 LEFT ANTI JOIN db1.table2 ON db1.table1.siteid = db1.table2.siteid;";
String sql1 = "SELECT /*+ SET_VAR(enable_nereids_planner=true, ENABLE_FALLBACK_TO_ORIGINAL_PLANNER=false, DISABLE_NEREIDS_RULES=PRUNE_EMPTY_PARTITION) */ * FROM db1.table1 LEFT ANTI JOIN db1.table2 ON db1.table1.siteid = db1.table2.siteid;";
String explain = dorisAssert.query(sql1).explainQuery();
Assert.assertTrue(explain
.contains("__DORIS_DELETE_SIGN__ = 0"));
@@ -45,6 +45,7 @@ public static void beforeClass() throws Exception {
UtFrameUtils.createDorisCluster(runningDir);
// create connect context
connectContext = UtFrameUtils.createDefaultCtx();
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
// create database
String createDbStmtStr = "create database test;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, connectContext);
@@ -44,6 +44,7 @@ class DistributeHintTest extends TestWithFeService implements MemoPatternMatchSu
protected void runBeforeAll() throws Exception {
createDatabase("test");
useDatabase("test");
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");

createTable("CREATE TABLE `t1` (\n"
+ " `a` int(11) NULL,\n"
@@ -43,6 +43,7 @@
class CompareOuterJoinTest extends SqlTestBase {
@Test
void testStarGraphWithInnerJoin() {
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
// t2
// |
//t3-- t1 -- t4
@@ -72,6 +73,7 @@ void testStarGraphWithInnerJoin() {

@Test
void testRandomQuery() {
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
Plan p1 = new HyperGraphBuilder(Sets.newHashSet(JoinType.INNER_JOIN))
.randomBuildPlanWith(3, 3);
p1 = PlanChecker.from(connectContext, p1)
@@ -91,7 +93,7 @@

@Test
void testInnerJoinWithFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 inner join T2 on T1.id = T2.id where T1.id = 0",
connectContext
@@ -118,7 +120,7 @@

@Test
void testInnerJoinWithFilter2() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 inner join T2 on T1.id = T2.id where T1.id = 0",
connectContext
@@ -144,12 +146,11 @@

@Test
void testLeftOuterJoinWithLeftFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from ( select * from T1 where T1.id = 0) T1 left outer join T2 on T1.id = T2.id",
connectContext
);
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
Plan p1 = PlanChecker.from(c1)
.analyze()
.rewrite()
@@ -172,12 +173,11 @@

@Test
void testLeftOuterJoinWithRightFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 left outer join ( select * from T2 where T2.id = 0) T2 on T1.id = T2.id",
connectContext
);
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
Plan p1 = PlanChecker.from(c1)
.analyze()
.rewrite()
@@ -41,7 +41,7 @@
class InferJoinTest extends SqlTestBase {
@Test
void testInnerInferLeft() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 inner join T2 on T1.id = T2.id where T1.id = 0",
connectContext
@@ -70,7 +70,7 @@ void testInnerInferLeft() {

@Test
void testInnerInferLeftWithFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 inner join T2 on T1.id = T2.id where T1.id = 0",
connectContext
@@ -103,7 +103,7 @@ void testInnerInferLeftWithFilter() {
@Disabled
@Test
void testInnerInferLeftWithJoinCond() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 inner join "
+ "(select T2.id from T2 inner join T3 on T2.id = T3.id) T2 "
@@ -137,12 +137,11 @@ void testInnerInferLeftWithJoinCond() {

@Test
void testLeftOuterJoinWithRightFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 left outer join ( select * from T2 where T2.id = 0) T2 on T1.id = T2.id",
connectContext
);
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES");
Plan p1 = PlanChecker.from(c1)
.analyze()
.rewrite()
@@ -38,6 +38,7 @@
class InferPredicateTest extends SqlTestBase {
@Test
void testPullUpQueryFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 left join T2 on T1.id = T2.id where T1.id = 1",
connectContext
@@ -38,6 +38,7 @@
class PullupExpressionTest extends SqlTestBase {
@Test
void testPullUpQueryFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 join T2 on T1.id = T2.id where T1.id = 1",
connectContext
@@ -64,6 +65,7 @@ void testPullUpQueryFilter() {

@Test
void testPullUpQueryJoinCondition() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 join T2 on T1.id = T2.id and T1.score = T2.score",
connectContext
@@ -90,6 +92,7 @@ void testPullUpQueryJoinCondition() {

@Test
void testPullUpViewFilter() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 join T2 on T1.id = T2.id",
connectContext
@@ -117,6 +120,7 @@ void testPullUpViewFilter() {

@Test
void testPullUpViewJoinCondition() {
connectContext.getSessionVariable().setDisableNereidsRules("INFER_PREDICATES,PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select * from T1 join T2 on T1.id = T2.id ",
connectContext
@@ -37,6 +37,7 @@
class StructInfoMapTest extends SqlTestBase {
@Test
void testTableMap() throws Exception {
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select T1.id from T1 inner join T2 "
+ "on T1.id = T2.id "
@@ -60,7 +61,8 @@ public boolean isMVPartitionValid(MTMV mtmv, ConnectContext ctx) {
};
connectContext.getSessionVariable().enableMaterializedViewRewrite = true;
connectContext.getSessionVariable().enableMaterializedViewNestRewrite = true;
createMvByNereids("create materialized view mv1 BUILD IMMEDIATE REFRESH COMPLETE ON MANUAL\n"

createMvByNereids("create materialized view if not exists mv1 BUILD IMMEDIATE REFRESH COMPLETE ON MANUAL\n"
+ " DISTRIBUTED BY RANDOM BUCKETS 1\n"
+ " PROPERTIES ('replication_num' = '1') \n"
+ " as select T1.id from T1 inner join T2 "
@@ -85,6 +87,7 @@ public boolean isMVPartitionValid(MTMV mtmv, ConnectContext ctx) {

@Test
void testLazyRefresh() throws Exception {
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select T1.id from T1 inner join T2 "
+ "on T1.id = T2.id "
@@ -109,7 +112,7 @@ public boolean isMVPartitionValid(MTMV mtmv, ConnectContext ctx) {
};
connectContext.getSessionVariable().enableMaterializedViewRewrite = true;
connectContext.getSessionVariable().enableMaterializedViewNestRewrite = true;
createMvByNereids("create materialized view mv1 BUILD IMMEDIATE REFRESH COMPLETE ON MANUAL\n"
createMvByNereids("create materialized view if not exists mv1 BUILD IMMEDIATE REFRESH COMPLETE ON MANUAL\n"
+ " DISTRIBUTED BY RANDOM BUCKETS 1\n"
+ " PROPERTIES ('replication_num' = '1') \n"
+ " as select T1.id from T1 inner join T2 "
@@ -135,6 +138,7 @@ public boolean isMVPartitionValid(MTMV mtmv, ConnectContext ctx) {

@Test
void testTableChild() throws Exception {
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
CascadesContext c1 = createCascadesContext(
"select T1.id from T1 inner join T2 "
+ "on T1.id = T2.id "
@@ -149,7 +153,7 @@ public boolean isMVPartitionValid(MTMV mtmv, ConnectContext ctx) {
};
connectContext.getSessionVariable().enableMaterializedViewRewrite = true;
connectContext.getSessionVariable().enableMaterializedViewNestRewrite = true;
createMvByNereids("create materialized view mv1 BUILD IMMEDIATE REFRESH COMPLETE ON MANUAL\n"
createMvByNereids("create materialized view if not exists mv1 BUILD IMMEDIATE REFRESH COMPLETE ON MANUAL\n"
+ " DISTRIBUTED BY RANDOM BUCKETS 1\n"
+ " PROPERTIES ('replication_num' = '1') \n"
+ " as select T1.id from T1 inner join T2 "
@@ -51,6 +51,7 @@ protected void runBeforeAll() throws Exception {
+ "distributed by hash(id) buckets 10\n"
+ "properties('replication_num' = '1');");
connectContext.setDatabase("test");
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
}

@Test
@@ -53,6 +53,7 @@ protected void runBeforeAll() throws Exception {
+ "distributed by hash(id) buckets 10\n"
+ "properties('replication_num' = '1');");
connectContext.setDatabase("test");
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
}

@Test
@@ -49,6 +49,7 @@ protected void runBeforeAll() throws Exception {
+ "distributed by hash(id) buckets 10\n"
+ "properties('replication_num' = '1');");
connectContext.setDatabase("test");
connectContext.getSessionVariable().setDisableNereidsRules("PRUNE_EMPTY_PARTITION");
}

@Test